Mirror of https://github.com/oven-sh/bun, synced 2026-02-08 01:49:33 +00:00

Compare commits: `codex/fix-...` ... `kai/spawn-...` (144 commits)
Commits in this comparison (abbreviated SHA1, 144 total; author and date columns were empty):

0ba36ce91b bf0b18a2e8 d4ccba67f2 43777cffee 3aedf0692c 346e97dde2
aa37ecb7a5 9811a2b53e b9e72d0d2e b7d4b14b3d 59e1320fb1 e402adaebf
3773ceeb7e 162a9b66d8 6274f10096 978540902c b99a1256ff 8a1d8047f1
a473657adb 775c3b1987 7ee98852c6 f46df399eb c103b57bcc 3b3cde9e74
2482af60d5 2245b5efd6 155475693b f5b42e1507 139f2b23a2 28006d0ad4
c44515eaaf e0924ef226 9499f21518 6b4662ff55 a445b45e55 82b34bbbdd
4d905123fa c6deb4527c f3bca62a77 62794850fa f53aff0935 9c5797e2f5
4329a66a1d 12a4b95b34 cf00cb495c 5763a8e533 dedd433cbf d6590c4bfa
07d3d6c9f6 631e674842 3d19c1156c 7a069d7214 6ebad50543 8750f0b884
c38bace86c 9fd18361f2 de6739c401 2801cb1f4a b642e36da2 df3337936c
ea05de59b3 601b8e3aaa a11d9e2cd4 df84f665a5 498186764a 02a7d71b70
c9761d4aa6 c863e7582f d4710c6e86 e1f3796677 ec07ef83a2 eddee1b8cb
fa1d37b4e3 5b0523a32a 5039310199 61e03a2758 27abb51561 09d0846d1b
8e7cdb8493 538caa4d5e 24bc236eb7 f59050fc23 1b092f156b 6a79b9ef87
f62940bbda c82345c0a0 817d0464f6 a5bb525614 4cb7910e32 d7970946eb
014fb6be8f 5c7991b707 da5fc817d1 407c4e800a 11070b8e16 adfdaab4fd
bfd7fc06c7 bd3abc5a2a 193193024f 6edc3a9900 1bd44e9ce7 c7327d62c2
90dda8219f 885979644d 13c5b0d9cb d6e45afef9 300aedd9cc d9cf836b67
293215778f 95346bd919 ceaaed4848 abaa69183b 3e1075410b 7a88bb0e1c
7a790581e0 d5cc530024 d7548325b1 d11fd94cdb 4cbd040485 773484a628
71c14fac7b b2a728e45d 390798c172 284de53f26 5a025abddf 4ab4b1b131
13ea970852 ba78d5b2c3 ce8767cdc8 082a9cb59c 3c37f25b65 a079743a02
e0852fd651 6bbd1e0685 4534f6e635 c62a7a77a3 ecf5ea389f 010ef4d119
4d77cd53f1 3cf353b755 fd894f5a65 a9969b7db2 27a08fca84 a398bd62a3
@@ -450,7 +450,7 @@ function getBuildCppStep(platform, options) {
      BUN_CPP_ONLY: "ON",
      ...getBuildEnv(platform, options),
    },
-   // We used to build the C++ dependencies and bun in seperate steps.
+   // We used to build the C++ dependencies and bun in separate steps.
    // However, as long as the zig build takes longer than both sequentially,
    // it's cheaper to run them in the same step. Can be revisited in the future.
    command: [`${command} --target bun`, `${command} --target dependencies`],
@@ -922,7 +922,7 @@ function getOptionsStep() {
    {
      key: "unified-builds",
      select: "Do you want to build each platform in a single step?",
-     hint: "If true, builds will not be split into seperate steps (this will likely slow down the build)",
+     hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
      required: false,
      default: "false",
      options: booleanOptions,
@@ -930,7 +930,7 @@ function getOptionsStep() {
    {
      key: "unified-tests",
      select: "Do you want to run tests in a single step?",
-     hint: "If true, tests will not be split into seperate steps (this will be very slow)",
+     hint: "If true, tests will not be split into separate steps (this will be very slow)",
      required: false,
      default: "false",
      options: booleanOptions,
@@ -91,7 +91,7 @@ devTest("html file is watched", {

`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.

- Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to recieve changes.
+ Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes.

When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
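For orientation, here is a minimal sketch of a test written against the helpers described above. The import path and the exact option names (`files`, `test`) are assumptions for illustration, not a verbatim fixture from the repository.

```ts
import { expect } from "bun:test";
// `devTest` is the harness helper described above; its import path is assumed here.
import { devTest } from "./dev-server-harness";

devTest("index route picks up an edit", {
  // `files` holds the initial on-disk state of the project.
  files: {
    "index.html": "<h1>Hello</h1>",
  },
  async test(dev) {
    // dev.fetch() issues an HTTP request against the running dev server.
    const before = await dev.fetch("/");
    expect(await before.text()).toContain("Hello");

    // dev.write() replaces the file and waits for hot-reload to propagate
    // to every connected client before resolving (unlike raw node:fs writes).
    await dev.write("index.html", "<h1>Goodbye</h1>");

    const after = await dev.fetch("/");
    expect(await after.text()).toContain("Goodbye");
  },
});
```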
@@ -11,10 +11,10 @@ You'll find all of Bun's tests in the `test/` directory.

* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
- * `bun/` - `Bun` APIs tests, seperated by category, for example: `glob/` for `Bun.Glob` tests
- * `node/` - Node.js module tests, seperated by module, for example: `assert/` for `node:assert` tests
+ * `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
+ * `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
- * `web/` - Web API tests, seperated by category, for example: `fetch/` for `Request` and `Response` tests
+ * `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests
@@ -5,4 +5,6 @@
#
# git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
+ dedd433cbf2e2fe38e51bc166e08fbcc601ad42b # JSValue.undefined -> .jsUndefined()
+ 6b4662ff55f58247cc2fd22e85b4f9805b0950a5 # JSValue.jsUndefined() -> .js_undefined
.github/ISSUE_TEMPLATE/6-crash-report.yml (vendored, 1 change)

@@ -2,6 +2,7 @@ name: Prefilled crash report
description: Report a crash in Bun
labels:
  - crash
  - needs triage
body:
  - type: markdown
    attributes:
.github/workflows/codex-test-sync.yml (vendored, 33 changes)

@@ -5,14 +5,15 @@ on:
  types: [labeled, opened]

env:
- BUN_VERSION: "canary"
+ BUN_VERSION: "1.2.15"

jobs:
  sync-node-tests:
    runs-on: ubuntu-latest
    if: |
      (github.event.action == 'labeled' && github.event.label.name == 'codex') ||
-     (github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex'))
+     (github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
+     contains(github.head_ref, 'codex')
    permissions:
      contents: write
      pull-requests: write
@@ -28,15 +29,27 @@ jobs:
      with:
        bun-version: ${{ env.BUN_VERSION }}

-     - name: Get changed files and sync tests
+     - name: Get changed files
+       id: changed-files
+       uses: tj-actions/changed-files@v44
+       with:
+         files: |
+           test/js/node/test/parallel/**/*.{js,mjs,ts}
+           test/js/node/test/sequential/**/*.{js,mjs,ts}
+
+     - name: Sync tests
+       if: steps.changed-files.outputs.any_changed == 'true'
        shell: bash
        run: |
-         # Get the list of changed files from the PR
-         git diff --name-only origin/main...HEAD | while read -r file; do
-           if [[ "$file" =~ ^test/js/node/test/(parallel|sequential)/(.+)\.js$ ]]; then
-             test_name="${BASH_REMATCH[2]}"
-             echo "Syncing test: $test_name"
-             bun node:test:cp "$test_name"
-           fi
+         echo "Changed test files:"
+         echo "${{ steps.changed-files.outputs.all_changed_files }}"

+         # Process each changed test file
+         for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
+           # Extract test name from file path
+           test_name=$(basename "$file" | sed 's/\.[^.]*$//')
+           echo "Syncing test: $test_name"
+           bun node:test:cp "$test_name"
          done

      - name: Commit changes
.github/workflows/format.yml (vendored, 2 changes)

@@ -45,7 +45,7 @@ jobs:
  - name: Zig Format
    run: |
      bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
-     zig fmt src/**.zig
+     zig fmt src
  - name: Commit
    uses: stefanzweifel/git-auto-commit-action@v5
    with:
.github/workflows/update-zstd.yml (vendored, 10 changes)

@@ -21,16 +21,16 @@ jobs:
        set -euo pipefail

        # Extract the commit hash from the line after COMMIT
-       CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildZstd.cmake)
+       CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/CloneZstd.cmake)

        if [ -z "$CURRENT_VERSION" ]; then
-         echo "Error: Could not find COMMIT line in BuildZstd.cmake"
+         echo "Error: Could not find COMMIT line in CloneZstd.cmake"
          exit 1
        fi

        # Validate that it looks like a git hash
        if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
-         echo "Error: Invalid git hash format in BuildZstd.cmake"
+         echo "Error: Invalid git hash format in CloneZstd.cmake"
          echo "Found: $CURRENT_VERSION"
          echo "Expected: 40 character hexadecimal string"
          exit 1
@@ -76,7 +76,7 @@ jobs:
      run: |
        set -euo pipefail
        # Handle multi-line format where COMMIT and its value are on separate lines
-       sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildZstd.cmake
+       sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/CloneZstd.cmake

    - name: Create Pull Request
      if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
@@ -84,7 +84,7 @@ jobs:
      with:
        token: ${{ secrets.GITHUB_TOKEN }}
        add-paths: |
-         cmake/targets/BuildZstd.cmake
+         cmake/targets/CloneZstd.cmake
        commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
        title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
        delete-branch: true
.lldbinit (17 changes)

@@ -1,16 +1 @@
- # Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
- # (-p), but do not stop the process (-s) or notify the user (-n).
- #
- # JSC's garbage collector sends this signal (as configured by Bun WebKit in
- # Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
- # it. So stopping the process would just create noise when debugging any long-running script.
- process handle -p true -s false -n false SIGPWR
-
- command script import misctools/lldb/lldb_pretty_printers.py
- type category enable zig.lang
- type category enable zig.std
-
- command script import misctools/lldb/lldb_webkit.py
-
- command script delete btjs
- command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
+ command source -C -s true -e true misctools/lldb/init.lldb
@@ -47,6 +47,8 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).

> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.

> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.sh/docs/installation#cpu-requirements-and-baseline-builds)

```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash
@@ -50,6 +50,10 @@ bench("murmur64v2 (short)", () => {
  Bun.hash.murmur64v2(shortStr);
});

+ bench("rapidhash (short)", () => {
+   Bun.hash.rapidhash(shortStr);
+ });

bench("wyhash (128 KB)", () => {
  Bun.hash.wyhash(longStr);
});
@@ -94,4 +98,8 @@ bench("murmur64v2 (128 KB)", () => {
  Bun.hash.murmur64v2(longStr);
});

+ bench("rapidhash (128 KB)", () => {
+   Bun.hash.rapidhash(longStr);
+ });

run();
@@ -63,6 +63,7 @@ const BunBuildOptions = struct {
    /// `./build/codegen` or equivalent
    codegen_path: []const u8,
    no_llvm: bool,
+   override_no_export_cpp_apis: bool,

    cached_options_module: ?*Module = null,
    windows_shim: ?WindowsShim = null,
@@ -95,6 +96,7 @@ const BunBuildOptions = struct {
    opts.addOption(bool, "enable_asan", this.enable_asan);
    opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
    opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
+   opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);

    const mod = opts.createModule();
    this.cached_options_module = mod;
@@ -206,6 +208,7 @@ pub fn build(b: *Build) !void {
    const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;

    const no_llvm = b.option(bool, "no_llvm", "Experiment with Zig self hosted backends. No stability guaranteed") orelse false;
+   const override_no_export_cpp_apis = b.option(bool, "override-no-export-cpp-apis", "Override the default export_cpp_apis logic to disable exports") orelse false;

    var build_options = BunBuildOptions{
        .target = target,
@@ -217,6 +220,7 @@ pub fn build(b: *Build) !void {
        .codegen_path = codegen_path,
        .codegen_embed = codegen_embed,
        .no_llvm = no_llvm,
+       .override_no_export_cpp_apis = override_no_export_cpp_apis,

        .version = try Version.parse(bun_version),
        .canary_revision = canary: {
@@ -476,6 +480,7 @@ fn addMultiCheck(
        .codegen_path = root_build_options.codegen_path,
        .no_llvm = root_build_options.no_llvm,
        .enable_asan = root_build_options.enable_asan,
+       .override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
    };

    var obj = addBunObject(b, &options);
@@ -508,6 +513,8 @@ fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize:
        translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
    }

+   translate_c.addIncludePath(b.path("vendor/zstd/lib"));

    if (target.result.os.tag == .windows) {
        // translate-c is unable to translate the unsuffixed windows functions
        // like `SetCurrentDirectory` since they are defined with an odd macro
@@ -44,6 +44,7 @@
|
||||
"src/bun.js/bindings/webcrypto/*/*.cpp",
|
||||
"src/bun.js/bindings/node/*.cpp",
|
||||
"src/bun.js/bindings/node/crypto/*.cpp",
|
||||
"src/bun.js/bindings/node/http/*.cpp",
|
||||
"src/bun.js/bindings/v8/*.cpp",
|
||||
"src/bun.js/bindings/v8/shim/*.cpp",
|
||||
"src/bake/*.cpp",
|
||||
@@ -60,7 +61,9 @@
|
||||
"packages/bun-usockets/src/internal/*.c",
|
||||
"packages/bun-usockets/src/crypto/*.c",
|
||||
"src/bun.js/bindings/uv-posix-polyfills.c",
|
||||
"src/bun.js/bindings/uv-posix-stubs.c"
|
||||
"src/bun.js/bindings/uv-posix-stubs.c",
|
||||
"src/*.c",
|
||||
"src/bun.js/bindings/node/http/llhttp/*.c"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
@@ -7,5 +7,9 @@ packages/bun-usockets/src/loop.c
|
||||
packages/bun-usockets/src/quic.c
|
||||
packages/bun-usockets/src/socket.c
|
||||
packages/bun-usockets/src/udp.c
|
||||
src/asan-config.c
|
||||
src/bun.js/bindings/node/http/llhttp/api.c
|
||||
src/bun.js/bindings/node/http/llhttp/http.c
|
||||
src/bun.js/bindings/node/http/llhttp/llhttp.c
|
||||
src/bun.js/bindings/uv-posix-polyfills.c
|
||||
src/bun.js/bindings/uv-posix-stubs.c
|
||||
|
||||
@@ -28,6 +28,7 @@ src/bun.js/bindings/BunWorkerGlobalScope.cpp
|
||||
src/bun.js/bindings/c-bindings.cpp
|
||||
src/bun.js/bindings/CallSite.cpp
|
||||
src/bun.js/bindings/CallSitePrototype.cpp
|
||||
src/bun.js/bindings/CatchScopeBinding.cpp
|
||||
src/bun.js/bindings/CodeCoverage.cpp
|
||||
src/bun.js/bindings/ConsoleObject.cpp
|
||||
src/bun.js/bindings/Cookie.cpp
|
||||
@@ -144,6 +145,13 @@ src/bun.js/bindings/node/crypto/JSSign.cpp
|
||||
src/bun.js/bindings/node/crypto/JSVerify.cpp
|
||||
src/bun.js/bindings/node/crypto/KeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsList.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParser.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
|
||||
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
|
||||
src/bun.js/bindings/node/NodeTimers.cpp
|
||||
src/bun.js/bindings/NodeAsyncHooks.cpp
|
||||
src/bun.js/bindings/NodeDirent.cpp
|
||||
@@ -159,6 +167,7 @@ src/bun.js/bindings/NodeVM.cpp
|
||||
src/bun.js/bindings/NodeVMModule.cpp
|
||||
src/bun.js/bindings/NodeVMScript.cpp
|
||||
src/bun.js/bindings/NodeVMSourceTextModule.cpp
|
||||
src/bun.js/bindings/NodeVMSyntheticModule.cpp
|
||||
src/bun.js/bindings/NoOpForTesting.cpp
|
||||
src/bun.js/bindings/ObjectBindings.cpp
|
||||
src/bun.js/bindings/objects.cpp
|
||||
@@ -167,6 +176,7 @@ src/bun.js/bindings/Path.cpp
|
||||
src/bun.js/bindings/ProcessBindingBuffer.cpp
|
||||
src/bun.js/bindings/ProcessBindingConstants.cpp
|
||||
src/bun.js/bindings/ProcessBindingFs.cpp
|
||||
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
|
||||
src/bun.js/bindings/ProcessBindingNatives.cpp
|
||||
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
|
||||
src/bun.js/bindings/ProcessBindingUV.cpp
|
||||
|
||||
@@ -52,6 +52,7 @@ src/js/internal/debugger.ts
|
||||
src/js/internal/errors.ts
|
||||
src/js/internal/fifo.ts
|
||||
src/js/internal/fixed_queue.ts
|
||||
src/js/internal/freelist.ts
|
||||
src/js/internal/fs/cp-sync.ts
|
||||
src/js/internal/fs/cp.ts
|
||||
src/js/internal/fs/glob.ts
|
||||
|
||||
@@ -32,7 +32,11 @@ src/bun.js/api/bun/h2_frame_parser.zig
|
||||
src/bun.js/api/bun/lshpack.zig
|
||||
src/bun.js/api/bun/process.zig
|
||||
src/bun.js/api/bun/socket.zig
|
||||
src/bun.js/api/bun/socket/Handlers.zig
|
||||
src/bun.js/api/bun/socket/Listener.zig
|
||||
src/bun.js/api/bun/socket/SocketAddress.zig
|
||||
src/bun.js/api/bun/socket/tls_socket_functions.zig
|
||||
src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig
|
||||
src/bun.js/api/bun/spawn.zig
|
||||
src/bun.js/api/bun/spawn/stdio.zig
|
||||
src/bun.js/api/bun/ssl_wrapper.zig
|
||||
@@ -55,13 +59,24 @@ src/bun.js/api/html_rewriter.zig
|
||||
src/bun.js/api/JSBundler.zig
|
||||
src/bun.js/api/JSTranspiler.zig
|
||||
src/bun.js/api/server.zig
|
||||
src/bun.js/api/server/AnyRequestContext.zig
|
||||
src/bun.js/api/server/FileRoute.zig
|
||||
src/bun.js/api/server/HTMLBundle.zig
|
||||
src/bun.js/api/server/HTTPStatusText.zig
|
||||
src/bun.js/api/server/InspectorBunFrontendDevServerAgent.zig
|
||||
src/bun.js/api/server/NodeHTTPResponse.zig
|
||||
src/bun.js/api/server/RequestContext.zig
|
||||
src/bun.js/api/server/ServerConfig.zig
|
||||
src/bun.js/api/server/ServerWebSocket.zig
|
||||
src/bun.js/api/server/SSLConfig.zig
|
||||
src/bun.js/api/server/StaticRoute.zig
|
||||
src/bun.js/api/server/WebSocketServerContext.zig
|
||||
src/bun.js/api/streams.classes.zig
|
||||
src/bun.js/api/Timer.zig
|
||||
src/bun.js/api/Timer/EventLoopTimer.zig
|
||||
src/bun.js/api/Timer/ImmediateObject.zig
|
||||
src/bun.js/api/Timer/TimeoutObject.zig
|
||||
src/bun.js/api/Timer/TimerObjectInternals.zig
|
||||
src/bun.js/api/TOMLObject.zig
|
||||
src/bun.js/api/UnsafeObject.zig
|
||||
src/bun.js/bindgen_test.zig
|
||||
@@ -70,6 +85,7 @@ src/bun.js/bindings/AnyPromise.zig
|
||||
src/bun.js/bindings/bun-simdutf.zig
|
||||
src/bun.js/bindings/CachedBytecode.zig
|
||||
src/bun.js/bindings/CallFrame.zig
|
||||
src/bun.js/bindings/CatchScope.zig
|
||||
src/bun.js/bindings/codegen.zig
|
||||
src/bun.js/bindings/CommonAbortReason.zig
|
||||
src/bun.js/bindings/CommonStrings.zig
|
||||
@@ -105,6 +121,7 @@ src/bun.js/bindings/JSPropertyIterator.zig
|
||||
src/bun.js/bindings/JSRef.zig
|
||||
src/bun.js/bindings/JSRuntimeType.zig
|
||||
src/bun.js/bindings/JSString.zig
|
||||
src/bun.js/bindings/JSType.zig
|
||||
src/bun.js/bindings/JSUint8Array.zig
|
||||
src/bun.js/bindings/JSValue.zig
|
||||
src/bun.js/bindings/NodeModuleModule.zig
|
||||
@@ -133,6 +150,21 @@ src/bun.js/ConsoleObject.zig
|
||||
src/bun.js/Counters.zig
|
||||
src/bun.js/Debugger.zig
|
||||
src/bun.js/event_loop.zig
|
||||
src/bun.js/event_loop/AnyEventLoop.zig
|
||||
src/bun.js/event_loop/AnyTask.zig
|
||||
src/bun.js/event_loop/AnyTaskWithExtraContext.zig
|
||||
src/bun.js/event_loop/ConcurrentPromiseTask.zig
|
||||
src/bun.js/event_loop/ConcurrentTask.zig
|
||||
src/bun.js/event_loop/CppTask.zig
|
||||
src/bun.js/event_loop/DeferredTaskQueue.zig
|
||||
src/bun.js/event_loop/EventLoopHandle.zig
|
||||
src/bun.js/event_loop/GarbageCollectionController.zig
|
||||
src/bun.js/event_loop/JSCScheduler.zig
|
||||
src/bun.js/event_loop/ManagedTask.zig
|
||||
src/bun.js/event_loop/MiniEventLoop.zig
|
||||
src/bun.js/event_loop/PosixSignalHandle.zig
|
||||
src/bun.js/event_loop/Task.zig
|
||||
src/bun.js/event_loop/WorkTask.zig
|
||||
src/bun.js/hot_reloader.zig
|
||||
src/bun.js/ipc.zig
|
||||
src/bun.js/javascript_core_c_api.zig
|
||||
@@ -176,6 +208,9 @@ src/bun.js/node/util/parse_args_utils.zig
|
||||
src/bun.js/node/util/parse_args.zig
|
||||
src/bun.js/node/util/validators.zig
|
||||
src/bun.js/node/win_watcher.zig
|
||||
src/bun.js/node/zlib/NativeBrotli.zig
|
||||
src/bun.js/node/zlib/NativeZlib.zig
|
||||
src/bun.js/node/zlib/NativeZstd.zig
|
||||
src/bun.js/ProcessAutoKiller.zig
|
||||
src/bun.js/rare_data.zig
|
||||
src/bun.js/ResolveMessage.zig
|
||||
@@ -228,14 +263,47 @@ src/bun.js/webcore/TextEncoder.zig
|
||||
src/bun.js/webcore/TextEncoderStreamEncoder.zig
|
||||
src/bun.js/WTFTimer.zig
|
||||
src/bun.zig
|
||||
src/bundler/AstBuilder.zig
|
||||
src/bundler/bundle_v2.zig
|
||||
src/bundler/BundleThread.zig
|
||||
src/bundler/Chunk.zig
|
||||
src/bundler/DeferredBatchTask.zig
|
||||
src/bundler/entry_points.zig
|
||||
src/bundler/Graph.zig
|
||||
src/bundler/HTMLImportManifest.zig
|
||||
src/bundler/linker_context/computeChunks.zig
|
||||
src/bundler/linker_context/computeCrossChunkDependencies.zig
|
||||
src/bundler/linker_context/convertStmtsForChunk.zig
|
||||
src/bundler/linker_context/convertStmtsForChunkForDevServer.zig
|
||||
src/bundler/linker_context/doStep5.zig
|
||||
src/bundler/linker_context/findAllImportedPartsInJSOrder.zig
|
||||
src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig
|
||||
src/bundler/linker_context/findImportedFilesInCSSOrder.zig
|
||||
src/bundler/linker_context/generateChunksInParallel.zig
|
||||
src/bundler/linker_context/generateCodeForFileInChunkJS.zig
|
||||
src/bundler/linker_context/generateCodeForLazyExport.zig
|
||||
src/bundler/linker_context/generateCompileResultForCssChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForJSChunk.zig
|
||||
src/bundler/linker_context/postProcessCSSChunk.zig
|
||||
src/bundler/linker_context/postProcessHTMLChunk.zig
|
||||
src/bundler/linker_context/postProcessJSChunk.zig
|
||||
src/bundler/linker_context/prepareCssAstsForChunk.zig
|
||||
src/bundler/linker_context/renameSymbolsInChunk.zig
|
||||
src/bundler/linker_context/scanImportsAndExports.zig
|
||||
src/bundler/linker_context/writeOutputFilesToDisk.zig
|
||||
src/bundler/LinkerContext.zig
|
||||
src/bundler/LinkerGraph.zig
|
||||
src/bundler/ParseTask.zig
|
||||
src/bundler/ServerComponentParseTask.zig
|
||||
src/bundler/ThreadPool.zig
|
||||
src/bunfig.zig
|
||||
src/cache.zig
|
||||
src/ci_info.zig
|
||||
src/cli.zig
|
||||
src/cli/add_command.zig
|
||||
src/cli/add_completions.zig
|
||||
src/cli/Arguments.zig
|
||||
src/cli/audit_command.zig
|
||||
src/cli/build_command.zig
|
||||
src/cli/bunx_command.zig
|
||||
@@ -384,7 +452,22 @@ src/deps/picohttp.zig
|
||||
src/deps/picohttpparser.zig
|
||||
src/deps/tcc.zig
|
||||
src/deps/uws.zig
|
||||
src/deps/uws/App.zig
|
||||
src/deps/uws/BodyReaderMixin.zig
|
||||
src/deps/uws/ConnectingSocket.zig
|
||||
src/deps/uws/InternalLoopData.zig
|
||||
src/deps/uws/ListenSocket.zig
|
||||
src/deps/uws/Loop.zig
|
||||
src/deps/uws/Request.zig
|
||||
src/deps/uws/Response.zig
|
||||
src/deps/uws/socket.zig
|
||||
src/deps/uws/SocketContext.zig
|
||||
src/deps/uws/Timer.zig
|
||||
src/deps/uws/udp.zig
|
||||
src/deps/uws/UpgradedDuplex.zig
|
||||
src/deps/uws/us_socket_t.zig
|
||||
src/deps/uws/WebSocket.zig
|
||||
src/deps/uws/WindowsNamedPipe.zig
|
||||
src/deps/zig-clap/clap.zig
|
||||
src/deps/zig-clap/clap/args.zig
|
||||
src/deps/zig-clap/clap/comptime.zig
|
||||
@@ -405,6 +488,7 @@ src/fd.zig
|
||||
src/feature_flags.zig
|
||||
src/fmt.zig
|
||||
src/fs.zig
|
||||
src/fs/stat_hash.zig
|
||||
src/futex.zig
|
||||
src/generated_perf_trace_events.zig
|
||||
src/generated_versions_list.zig
|
||||
@@ -435,6 +519,8 @@ src/ini.zig
|
||||
src/install/bin.zig
|
||||
src/install/dependency.zig
|
||||
src/install/extract_tarball.zig
|
||||
src/install/hoisted_install.zig
|
||||
src/install/install_binding.zig
|
||||
src/install/install.zig
|
||||
src/install/integrity.zig
|
||||
src/install/lifecycle_script_runner.zig
|
||||
@@ -454,6 +540,8 @@ src/install/lockfile/printer/Yarn.zig
|
||||
src/install/lockfile/Tree.zig
|
||||
src/install/migration.zig
|
||||
src/install/npm.zig
|
||||
src/install/PackageInstall.zig
|
||||
src/install/PackageInstaller.zig
|
||||
src/install/PackageManager/CommandLineArguments.zig
|
||||
src/install/PackageManager/PackageJSONEditor.zig
|
||||
src/install/PackageManager/PackageManagerOptions.zig
|
||||
@@ -472,7 +560,6 @@ src/io/PipeReader.zig
|
||||
src/io/pipes.zig
|
||||
src/io/PipeWriter.zig
|
||||
src/io/source.zig
|
||||
src/io/time.zig
|
||||
src/js_ast.zig
|
||||
src/js_lexer_tables.zig
|
||||
src/js_lexer.zig
|
||||
@@ -563,10 +650,24 @@ src/shell/builtin/yes.zig
|
||||
src/shell/EnvMap.zig
|
||||
src/shell/EnvStr.zig
|
||||
src/shell/interpreter.zig
|
||||
src/shell/IO.zig
|
||||
src/shell/IOReader.zig
|
||||
src/shell/IOWriter.zig
|
||||
src/shell/ParsedShellScript.zig
|
||||
src/shell/RefCountedStr.zig
|
||||
src/shell/shell.zig
|
||||
src/shell/states/Assigns.zig
|
||||
src/shell/states/Async.zig
|
||||
src/shell/states/Base.zig
|
||||
src/shell/states/Binary.zig
|
||||
src/shell/states/Cmd.zig
|
||||
src/shell/states/CondExpr.zig
|
||||
src/shell/states/Expansion.zig
|
||||
src/shell/states/If.zig
|
||||
src/shell/states/Pipeline.zig
|
||||
src/shell/states/Script.zig
|
||||
src/shell/states/Stmt.zig
|
||||
src/shell/states/Subshell.zig
|
||||
src/shell/subproc.zig
|
||||
src/shell/util.zig
|
||||
src/sourcemap/CodeCoverage.zig
|
||||
@@ -582,12 +683,16 @@ src/StaticHashMap.zig
|
||||
src/string_immutable.zig
|
||||
src/string_types.zig
|
||||
src/string.zig
|
||||
src/string/escapeHTML.zig
|
||||
src/string/HashedString.zig
|
||||
src/string/MutableString.zig
|
||||
src/string/paths.zig
|
||||
src/string/PathString.zig
|
||||
src/string/SmolStr.zig
|
||||
src/string/StringBuilder.zig
|
||||
src/string/StringJoiner.zig
|
||||
src/string/unicode.zig
|
||||
src/string/visible.zig
|
||||
src/string/WTFStringImpl.zig
|
||||
src/sync.zig
|
||||
src/sys_uv.zig
|
||||
|
||||
@@ -42,6 +42,29 @@ else()
|
||||
set(CONFIGURE_DEPENDS "")
|
||||
endif()
|
||||
|
||||
# --- Dependencies ---
|
||||
|
||||
set(BUN_DEPENDENCIES
|
||||
BoringSSL
|
||||
Brotli
|
||||
Cares
|
||||
Highway
|
||||
LibDeflate
|
||||
LolHtml
|
||||
Lshpack
|
||||
Mimalloc
|
||||
TinyCC
|
||||
Zlib
|
||||
LibArchive # must be loaded after zlib
|
||||
HdrHistogram # must be loaded after zlib
|
||||
Zstd
|
||||
)
|
||||
|
||||
include(CloneZstd)
|
||||
# foreach(dependency ${BUN_DEPENDENCIES})
|
||||
# include(Clone${dependency})
|
||||
# endforeach()
|
||||
|
||||
# --- Codegen ---
|
||||
|
||||
set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error)
|
||||
@@ -408,6 +431,7 @@ set(BUN_OBJECT_LUT_SOURCES
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
|
||||
${CWD}/src/bun.js/bindings/ProcessBindingHTTPParser.cpp
|
||||
${CWD}/src/bun.js/modules/NodeModuleModule.cpp
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
|
||||
)
|
||||
@@ -421,6 +445,7 @@ set(BUN_OBJECT_LUT_OUTPUTS
|
||||
${CODEGEN_PATH}/ProcessBindingConstants.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingFs.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingNatives.lut.h
|
||||
${CODEGEN_PATH}/ProcessBindingHTTPParser.lut.h
|
||||
${CODEGEN_PATH}/NodeModuleModule.lut.h
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
|
||||
)
|
||||
@@ -580,6 +605,7 @@ register_command(
|
||||
${BUN_ZIG_OUTPUT}
|
||||
TARGETS
|
||||
clone-zig
|
||||
clone-zstd
|
||||
SOURCES
|
||||
${BUN_ZIG_SOURCES}
|
||||
${BUN_ZIG_GENERATED_SOURCES}
|
||||
@@ -647,20 +673,14 @@ if(WIN32)
|
||||
else()
|
||||
set(Bun_VERSION_WITH_TAG ${VERSION})
|
||||
endif()
|
||||
set(BUN_ICO_PATH ${CWD}/src/bun.ico)
|
||||
configure_file(${CWD}/src/bun.ico ${CODEGEN_PATH}/bun.ico COPYONLY)
|
||||
set(BUN_ICO_PATH ${CODEGEN_PATH}/bun.ico)
|
||||
configure_file(
|
||||
${CWD}/src/windows-app-info.rc
|
||||
${CODEGEN_PATH}/windows-app-info.rc
|
||||
@ONLY
|
||||
)
|
||||
add_custom_command(
|
||||
OUTPUT ${CODEGEN_PATH}/windows-app-info.res
|
||||
COMMAND rc.exe /fo ${CODEGEN_PATH}/windows-app-info.res ${CODEGEN_PATH}/windows-app-info.rc
|
||||
DEPENDS ${CODEGEN_PATH}/windows-app-info.rc ${CODEGEN_PATH}/bun.ico
|
||||
COMMENT "Adding Windows resource file ${CODEGEN_PATH}/windows-app-info.res with ico in ${CODEGEN_PATH}/bun.ico"
|
||||
)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.res)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
|
||||
endif()
|
||||
|
||||
# --- Executable ---
|
||||
@@ -732,6 +752,7 @@ target_include_directories(${bun} PRIVATE
|
||||
${CWD}/src/bun.js/bindings/webcore
|
||||
${CWD}/src/bun.js/bindings/webcrypto
|
||||
${CWD}/src/bun.js/bindings/node/crypto
|
||||
${CWD}/src/bun.js/bindings/node/http
|
||||
${CWD}/src/bun.js/bindings/sqlite
|
||||
${CWD}/src/bun.js/bindings/v8
|
||||
${CWD}/src/bun.js/modules
|
||||
@@ -890,6 +911,9 @@ if(NOT WIN32)
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-Wno-nullability-completeness
|
||||
-Wno-inconsistent-dllimport
|
||||
-Wno-incompatible-pointer-types
|
||||
-Wno-deprecated-declarations
|
||||
)
|
||||
endif()
|
||||
|
||||
@@ -1015,6 +1039,7 @@ if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
${WEBKIT_LIB_PATH}/WTF.lib
|
||||
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
|
||||
${WEBKIT_LIB_PATH}/bmalloc.lib
|
||||
${WEBKIT_LIB_PATH}/sicudtd.lib
|
||||
${WEBKIT_LIB_PATH}/sicuind.lib
|
||||
${WEBKIT_LIB_PATH}/sicuucd.lib
|
||||
@@ -1023,6 +1048,7 @@ if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
${WEBKIT_LIB_PATH}/WTF.lib
|
||||
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
|
||||
${WEBKIT_LIB_PATH}/bmalloc.lib
|
||||
${WEBKIT_LIB_PATH}/sicudt.lib
|
||||
${WEBKIT_LIB_PATH}/sicuin.lib
|
||||
${WEBKIT_LIB_PATH}/sicuuc.lib
|
||||
@@ -1046,22 +1072,6 @@ endif()
|
||||
|
||||
# --- Dependencies ---
|
||||
|
||||
set(BUN_DEPENDENCIES
|
||||
BoringSSL
|
||||
Brotli
|
||||
Cares
|
||||
Highway
|
||||
LibDeflate
|
||||
LolHtml
|
||||
Lshpack
|
||||
Mimalloc
|
||||
TinyCC
|
||||
Zlib
|
||||
LibArchive # must be loaded after zlib
|
||||
HdrHistogram # must be loaded after zlib
|
||||
Zstd
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
list(APPEND BUN_DEPENDENCIES Libuv)
|
||||
endif()
|
||||
|
||||
@@ -1,12 +1,3 @@
|
||||
register_repository(
|
||||
NAME
|
||||
zstd
|
||||
REPOSITORY
|
||||
facebook/zstd
|
||||
COMMIT
|
||||
f8745da6ff1ad1e7bab384bd1f9d742439278e99
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
TARGET
|
||||
zstd
|
||||
@@ -23,4 +14,6 @@ register_cmake_command(
|
||||
LIBRARIES
|
||||
zstd_static WIN32
|
||||
zstd UNIX
|
||||
INCLUDES
|
||||
lib
|
||||
)
|
||||
|
||||
8
cmake/targets/CloneZstd.cmake
Normal file
8
cmake/targets/CloneZstd.cmake
Normal file
@@ -0,0 +1,8 @@
|
||||
register_repository(
|
||||
NAME
|
||||
zstd
|
||||
REPOSITORY
|
||||
facebook/zstd
|
||||
COMMIT
|
||||
f8745da6ff1ad1e7bab384bd1f9d742439278e99
|
||||
)
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
- set(WEBKIT_VERSION b98e20b11e6ab044f73218bdd05ab064587b9ead)
+ set(WEBKIT_VERSION 397dafc9721b8f8046f9448abb6dbc14efe096d3)
endif()

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

@@ -20,7 +20,7 @@ else()
  unsupported(CMAKE_SYSTEM_NAME)
endif()

- set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5")
+ set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})

if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -55,13 +55,13 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF
optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to local the zig cache directory" DEFAULT ${CACHE_PATH}/zig/local)
optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global)

- if(CI AND CMAKE_HOST_APPLE)
+ if(CI)
  set(ZIG_COMPILER_SAFE_DEFAULT ON)
else()
  set(ZIG_COMPILER_SAFE_DEFAULT OFF)
endif()

- optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler. Only availble on macos aarch64." DEFAULT ${ZIG_COMPILER_SAFE_DEFAULT})
+ optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler." DEFAULT ${ZIG_COMPILER_SAFE_DEFAULT})

setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR})
setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR})
@@ -260,7 +260,6 @@ _bun_pm_completion() {
  'hash\:"generate & print the hash of the current lockfile" '
  'hash-string\:"print the string used to hash the lockfile" '
  'hash-print\:"print the hash stored in the current lockfile" '
  'audit\:"run a security audit of dependencies in Bun'\''s lockfile"'
  'cache\:"print the path to the cache folder" '
)

@@ -540,6 +539,7 @@ _bun_update_completion() {
  '--save[Save to package.json]' \
  '--dry-run[Don'"'"'t install anything]' \
  '--frozen-lockfile[Disallow changes to lockfile]' \
+ '--latest[Updates dependencies to latest version, regardless of compatibility]' \
  '-f[Always request the latest versions from the registry & reinstall all dependencies]' \
  '--force[Always request the latest versions from the registry & reinstall all dependencies]' \
  '--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
@@ -573,7 +573,7 @@ _bun_outdated_completion() {
  '--no-progress[Disable the progress bar]' \
  '--help[Print this help menu]' &&
ret=0

case $state in
  config)
    _bun_list_bunfig_toml
@@ -175,6 +175,7 @@ Bun.hash.xxHash3("data", 1234);
Bun.hash.murmur32v3("data", 1234);
Bun.hash.murmur32v2("data", 1234);
Bun.hash.murmur64v2("data", 1234);
+ Bun.hash.rapidhash("data", 1234);
```

## `Bun.CryptoHasher`

@@ -582,11 +582,11 @@ Compresses a `Uint8Array` using zlib's DEFLATE algorithm.
const buf = Buffer.from("hello".repeat(100));
const compressed = Bun.deflateSync(buf);

- buf; // => Uint8Array(25)
- compressed; // => Uint8Array(10)
+ buf; // => Buffer(500)
+ compressed; // => Uint8Array(12)
```

- The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bungzipsync).
+ The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bun-gzipsync).

## `Bun.inflateSync()`
docs/cli/info.md (new file, 65 lines)

@@ -0,0 +1,65 @@
`bun info` displays package metadata from the npm registry.

## Usage

```bash
$ bun info react
```

This will display information about the `react` package, including its latest version, description, homepage, dependencies, and more.

## Viewing specific versions

To view information about a specific version:

```bash
$ bun info react@18.0.0
```

## Viewing specific properties

You can also query specific properties from the package metadata:

```bash
$ bun info react version
$ bun info react dependencies
$ bun info react repository.url
```

## JSON output

To get the output in JSON format, use the `--json` flag:

```bash
$ bun info react --json
```

## Alias

`bun pm view` is an alias for `bun info`:

```bash
$ bun pm view react # equivalent to: bun info react
```

## Examples

```bash
# View basic package information
$ bun info is-number

# View a specific version
$ bun info is-number@7.0.0

# View all available versions
$ bun info is-number versions

# View package dependencies
$ bun info express dependencies

# View package homepage
$ bun info lodash homepage

# Get JSON output
$ bun info react --json
```
@@ -223,7 +223,16 @@ For convenience, here are download links for the latest version:

The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.

- Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
+ ### CPU requirements and `baseline` builds
+
+ Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available, which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install script automatically chooses the correct binary for your system, which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.

| Build        | Intel requirement                                                   | AMD requirement    |
| ------------ | ------------------------------------------------------------------- | ------------------ |
| x64          | Haswell (4th generation Core) or newer, except some low-end models  | Excavator or newer |
| x64-baseline | Nehalem (1st generation Core) or newer                              | Bulldozer or newer |

Bun does not currently support any CPUs older than the `baseline` target, which mandates the SSE4.2 extension.

Bun also publishes `darwin-x64-baseline` binaries, but these are just a copy of the `darwin-x64` ones, so they still have the same CPU requirement. We only maintain these since some tools expect them to exist. Bun requires macOS 13.0 or later, which does not support any CPUs that don't meet our requirement.
@@ -76,7 +76,7 @@ The `define` field allows you to replace certain global identifiers with constant expressions

### `loader`

- Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun. If
+ Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun.

```toml
[loader]
@@ -382,6 +382,17 @@ registry = { url = "https://registry.npmjs.org", token = "123456" }
registry = "https://username:password@registry.npmjs.org"
```

### `install.linkWorkspacePackages`

To configure how workspace packages are linked, use the `install.linkWorkspacePackages` option.

Whether to link workspace packages from the monorepo root to their respective `node_modules` directories. Default `true`.

```toml
[install]
linkWorkspacePackages = true
```

### `install.scopes`

To configure a registry for a particular scope (e.g. `@myorg/<package>`) use `install.scopes`. You can reference environment variables with `$variable` notation.
@@ -102,7 +102,7 @@ Once the plugin is registered, `.yaml` and `.yml` files can be directly imported

{% codetabs %}

```ts#index.ts
- import data from "./data.yml"
+ import * as data from "./data.yml"

console.log(data);
```

@@ -17,6 +17,7 @@ console.log(Bun.hash.xxHash3(input)); // bigint
console.log(Bun.hash.murmur32v3(input)); // number
console.log(Bun.hash.murmur32v2(input)); // number
console.log(Bun.hash.murmur64v2(input)); // bigint
+ console.log(Bun.hash.rapidhash(input)); // bigint

// Second argument accepts a seed where relevant
console.log(Bun.hash(input, 12345));
misctools/lldb/init.lldb (new file, 19 lines)

@@ -0,0 +1,19 @@
# This file is separate from .lldbinit because it has to be in the same directory as the Python
# modules in order for the "attach" action to work.

# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR

command script import -c lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std

command script import -c lldb_webkit.py

command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
- "version": "1.2.16",
+ "version": "1.2.17",
  "workspaces": [
    "./packages/bun-types",
    "./packages/@types/bun"
@@ -24,8 +24,8 @@
  },
  "scripts": {
    "build": "bun run build:debug",
-   "watch": "zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
-   "watch-windows": "zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
+   "watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
+   "watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
    "bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",
    "build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
@@ -743,7 +743,7 @@ export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
  source,
  request,
  // It is theoretically possible for a breakpoint to resolve to multiple locations.
- // In that case, send a seperate `breakpoint` event for each one, excluding the first.
+ // In that case, send a separate `breakpoint` event for each one, excluding the first.
  notify: i > 0,
}),
);

@@ -850,7 +850,7 @@ const Summary = ({ errorCount, onClose }: { errorCount: number; onClose: () => v

<a href="https://bun.sh/discord" target="_blank" className="BunError-Summary-help">
  <svg width="18" viewBox="0 0 71 55" fill="none" xmlns="http://www.w3.org/2000/svg">
-   <g clip-path="url(#clip0)">
+   <g clipPath="url(#clip0)">
||||
<path
|
||||
d="M60.1045 4.8978C55.5792 2.8214 50.7265 1.2916 45.6527 0.41542C45.5603 0.39851 45.468 0.440769 45.4204 0.525289C44.7963 1.6353 44.105 3.0834 43.6209 4.2216C38.1637 3.4046 32.7345 3.4046 27.3892 4.2216C26.905 3.0581 26.1886 1.6353 25.5617 0.525289C25.5141 0.443589 25.4218 0.40133 25.3294 0.41542C20.2584 1.2888 15.4057 2.8186 10.8776 4.8978C10.8384 4.9147 10.8048 4.9429 10.7825 4.9795C1.57795 18.7309 -0.943561 32.1443 0.293408 45.3914C0.299005 45.4562 0.335386 45.5182 0.385761 45.5576C6.45866 50.0174 12.3413 52.7249 18.1147 54.5195C18.2071 54.5477 18.305 54.5139 18.3638 54.4378C19.7295 52.5728 20.9469 50.6063 21.9907 48.5383C22.0523 48.4172 21.9935 48.2735 21.8676 48.2256C19.9366 47.4931 18.0979 46.6 16.3292 45.5858C16.1893 45.5041 16.1781 45.304 16.3068 45.2082C16.679 44.9293 17.0513 44.6391 17.4067 44.3461C17.471 44.2926 17.5606 44.2813 17.6362 44.3151C29.2558 49.6202 41.8354 49.6202 53.3179 44.3151C53.3935 44.2785 53.4831 44.2898 53.5502 44.3433C53.9057 44.6363 54.2779 44.9293 54.6529 45.2082C54.7816 45.304 54.7732 45.5041 54.6333 45.5858C52.8646 46.6197 51.0259 47.4931 49.0921 48.2228C48.9662 48.2707 48.9102 48.4172 48.9718 48.5383C50.038 50.6034 51.2554 52.5699 52.5959 54.435C52.6519 54.5139 52.7526 54.5477 52.845 54.5195C58.6464 52.7249 64.529 50.0174 70.6019 45.5576C70.6551 45.5182 70.6887 45.459 70.6943 45.3942C72.1747 30.0791 68.2147 16.7757 60.1968 4.9823C60.1772 4.9429 60.1437 4.9147 60.1045 4.8978ZM23.7259 37.3253C20.2276 37.3253 17.3451 34.1136 17.3451 30.1693C17.3451 26.225 20.1717 23.0133 23.7259 23.0133C27.308 23.0133 30.1626 26.2532 30.1066 30.1693C30.1066 34.1136 27.28 37.3253 23.7259 37.3253ZM47.3178 37.3253C43.8196 37.3253 40.9371 34.1136 40.9371 30.1693C40.9371 26.225 43.7636 23.0133 47.3178 23.0133C50.9 23.0133 53.7545 26.2532 53.6986 30.1693C53.6986 34.1136 50.9 37.3253 47.3178 37.3253Z"
|
||||
fill="#5865F2"
|
||||
|
||||
@@ -72,6 +72,7 @@ async function buildRootModule(dryRun?: boolean) {
    },
  });
  write(join(cwd, "bin", "bun.exe"), "");
+ write(join(cwd, "bin", "bunx.exe"), "");
  write(
    join(cwd, "bin", "README.txt"),
    `The 'bun.exe' file is a placeholder for the binary file, which
@@ -105,7 +106,7 @@ without *requiring* a postinstall script.
    ),
    bin: {
      bun: "bin/bun.exe",
-     bunx: "bin/bun.exe",
+     bunx: "bin/bunx.exe",
    },
    os,
    cpu,

@@ -157,3 +157,15 @@ export function exists(path: string): boolean {
  }
  return false;
}

+ export function link(path: string, newPath: string): void {
+   debug("link", path, newPath);
+   try {
+     fs.unlinkSync(newPath);
+     fs.linkSync(path, newPath);
+     return;
+   } catch (error) {
+     copy(path, newPath);
+     debug("fs.linkSync failed, reverting to copy", error);
+   }
+ }

@@ -1,7 +1,7 @@
import { unzipSync } from "zlib";
import { debug, error } from "../console";
import { fetch } from "../fetch";
- import { chmod, join, rename, rm, tmp, write } from "../fs";
+ import { chmod, join, link, rename, rm, tmp, write } from "../fs";
import type { Platform } from "../platform";
import { abi, arch, os, supportedPlatforms } from "../platform";
import { spawn } from "../spawn";
@@ -125,6 +125,7 @@ export function optimizeBun(path: string): void {
    os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash";
  try {
    rename(path, join(__dirname, "bin", "bun.exe"));
+   link(join(__dirname, "bin", "bun.exe"), join(__dirname, "bin", "bunx.exe"));
    return;
  } catch (error) {
    debug("optimizeBun failed", error);
186
packages/bun-types/bun.d.ts
vendored
186
packages/bun-types/bun.d.ts
vendored
@@ -1126,6 +1126,7 @@ declare module "bun" {
|
||||
* This will be used by fetch() and Bun.connect() to avoid DNS lookups.
|
||||
*
|
||||
* @param hostname The hostname to prefetch
|
||||
* @param port The port to prefetch. Default is 443. Port helps distinguish between IPv6 vs IPv4-only connections.
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
@@ -1135,7 +1136,7 @@ declare module "bun" {
|
||||
* await fetch('https://example.com');
|
||||
* ```
|
||||
*/
|
||||
function prefetch(hostname: string): void;
|
||||
function prefetch(hostname: string, port?: number): void;
|
||||
|
||||
/**
|
||||
* **Experimental API**
|
||||
@@ -1865,6 +1866,7 @@ declare module "bun" {
|
||||
murmur32v3: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
murmur32v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
murmur64v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
rapidhash: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
}
|
||||
|
||||
type JavaScriptLoader = "jsx" | "js" | "ts" | "tsx";
|
||||
@@ -4922,7 +4924,7 @@ declare module "bun" {
|
||||
*
|
||||
* @param force Synchronously run the garbage collector
|
||||
*/
|
||||
function gc(force: boolean): void;
|
||||
function gc(force?: boolean): void;
|
||||
|
||||
/**
|
||||
* JavaScriptCore engine's internal heap snapshot
|
||||
@@ -5871,31 +5873,76 @@ declare module "bun" {
|
||||
index: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a TCP or TLS socket connection used for network communication.
|
||||
* This interface provides methods for reading, writing, managing the connection state,
|
||||
* and handling TLS-specific features if applicable.
|
||||
*
|
||||
* Sockets are created using `Bun.connect()` or accepted by a `Bun.listen()` server.
|
||||
*
|
||||
* @category HTTP & Networking
|
||||
*/
|
||||
interface Socket<Data = undefined> extends Disposable {
|
||||
/**
|
||||
* Write `data` to the socket
|
||||
* Writes `data` to the socket. This method is unbuffered and non-blocking. This uses the `sendto(2)` syscall internally.
|
||||
*
|
||||
* @param data The data to write to the socket
|
||||
* @param byteOffset The offset in the buffer to start writing from (defaults to 0)
|
||||
* @param byteLength The number of bytes to write (defaults to the length of the buffer)
|
||||
* For optimal performance with multiple small writes, consider batching multiple
|
||||
* writes together into a single `socket.write()` call.
|
||||
*
|
||||
* When passed a string, `byteOffset` and `byteLength` refer to the UTF-8 offset, not the string character offset.
|
||||
* @param data The data to write. Can be a string (encoded as UTF-8), `ArrayBuffer`, `TypedArray`, or `DataView`.
|
||||
* @param byteOffset The offset in bytes within the buffer to start writing from. Defaults to 0. Ignored for strings.
|
||||
* @param byteLength The number of bytes to write from the buffer. Defaults to the remaining length of the buffer from the offset. Ignored for strings.
|
||||
* @returns The number of bytes written. Returns `-1` if the socket is closed or shutting down. Can return less than the input size if the socket's buffer is full (backpressure).
|
||||
* @example
|
||||
* ```ts
|
||||
* // Send a string
|
||||
* const bytesWritten = socket.write("Hello, world!\n");
|
||||
*
|
||||
* This is unbuffered as of Bun v0.2.2. That means individual write() calls
|
||||
* will be slow. In the future, Bun will buffer writes and flush them at the
|
||||
* end of the tick, when the event loop is idle, or sooner if the buffer is full.
|
||||
* // Send binary data
|
||||
* const buffer = new Uint8Array([0x01, 0x02, 0x03]);
|
||||
* socket.write(buffer);
|
||||
*
|
||||
* // Send part of a buffer
|
||||
* const largeBuffer = new Uint8Array(1024);
|
||||
* // ... fill largeBuffer ...
|
||||
* socket.write(largeBuffer, 100, 50); // Write 50 bytes starting from index 100
|
||||
* ```
|
||||
*/
|
||||
write(data: string | BufferSource, byteOffset?: number, byteLength?: number): number;
|
||||
|
||||
/**
|
||||
* The data context for the socket.
|
||||
* The user-defined data associated with this socket instance.
|
||||
* This can be set when the socket is created via `Bun.connect({ data: ... })`.
|
||||
* It can be read or updated at any time.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // In a socket handler
|
||||
* function open(socket: Socket<{ userId: string }>) {
|
||||
* console.log(`Socket opened for user: ${socket.data.userId}`);
|
||||
* socket.data.lastActivity = Date.now(); // Update data
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
data: Data;
|
||||
|
||||
/**
|
||||
* Like {@link Socket.write} except it includes a TCP FIN packet
|
||||
* Sends the final data chunk and initiates a graceful shutdown of the socket's write side.
|
||||
* After calling `end()`, no more data can be written using `write()` or `end()`.
|
||||
* The socket remains readable until the remote end also closes its write side or the connection is terminated.
|
||||
* This sends a TCP FIN packet after writing the data.
|
||||
*
|
||||
* Use it to send your last message and close the connection.
|
||||
* @param data Optional final data to write before closing. Same types as `write()`.
|
||||
* @param byteOffset Optional offset for buffer data.
|
||||
* @param byteLength Optional length for buffer data.
|
||||
* @returns The number of bytes written for the final chunk. Returns `-1` if the socket was already closed or shutting down.
|
||||
* @example
|
||||
* ```ts
|
||||
* // send some data and close the write side
|
||||
* socket.end("Goodbye!");
|
||||
* // or close write side without sending final data
|
||||
* socket.end();
|
||||
* ```
|
||||
*/
|
||||
end(data?: string | BufferSource, byteOffset?: number, byteLength?: number): number;
|
||||
|
||||
@@ -5922,20 +5969,33 @@ declare module "bun" {
|
||||
timeout(seconds: number): void;
|
||||
|
||||
/**
|
||||
* Forcefully close the socket. The other end may not receive all data, and
|
||||
* the socket will be closed immediately.
|
||||
* Forcefully closes the socket connection immediately. This is an abrupt termination, unlike the graceful shutdown initiated by `end()`.
|
||||
* It uses `SO_LINGER` with `l_onoff=1` and `l_linger=0` before calling `close(2)`.
|
||||
* Consider using {@link close close()} or {@link end end()} for graceful shutdowns.
|
||||
*
|
||||
|
||||
* @example
|
||||
* ```ts
|
||||
* socket.terminate();
|
||||
* ```
|
||||
*/
|
||||
terminate(): void;
|
||||
|
||||
/**
|
||||
* Shutdown writes to a socket
|
||||
* Shuts down the write-half or both halves of the connection.
|
||||
* This allows the socket to enter a half-closed state where it can still receive data
|
||||
* but can no longer send data (`halfClose = true`), or close both read and write
|
||||
* (`halfClose = false`, similar to `end()` but potentially more immediate depending on OS).
|
||||
* Calls `shutdown(2)` syscall internally.
|
||||
*
|
||||
* This makes the socket a half-closed socket. It can still receive data.
|
||||
* @param halfClose If `true`, only shuts down the write side (allows receiving). If `false` or omitted, shuts down both read and write. Defaults to `false`.
|
||||
* @example
|
||||
* ```ts
|
||||
* // Stop sending data, but allow receiving
|
||||
* socket.shutdown(true);
|
||||
*
|
||||
|
||||
* // Shutdown both reading and writing
|
||||
* socket.shutdown();
|
||||
* ```
|
||||
*/
|
||||
shutdown(halfClose?: boolean): void;
|
||||
|
||||
@@ -5961,6 +6021,11 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Flush any buffered data to the socket
|
||||
* This attempts to send the data immediately, but success depends on the network conditions
|
||||
* and the receiving end.
|
||||
* It might be necessary after several `write` calls if immediate sending is critical,
|
||||
* though often the OS handles flushing efficiently. Note that `write` calls outside
|
||||
* `open`/`data`/`drain` might benefit from manual `cork`/`flush`.
|
||||
*/
|
||||
flush(): void;
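A small illustration of when an explicit `flush()` can help: several back-to-back `write()` calls issued outside of the `open`/`data`/`drain` handlers (for example from a timer), followed by one flush. This is a sketch only; whether a manual flush is needed depends on how the surrounding code is structured, and the heartbeat protocol here is made up.

```ts
// Sketch: batch a few small writes issued outside socket handlers, then flush once.
function sendHeartbeat(socket: import("bun").Socket) {
  socket.write("PING\r\n");
  socket.write(`TS ${Date.now()}\r\n`);
  socket.flush(); // hint that the buffered bytes should go out now
}

// e.g. invoked from a timer rather than from open/data/drain:
// setInterval(() => sendHeartbeat(mySocket), 30_000);
```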
|
||||
|
||||
@@ -5982,17 +6047,31 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Remote IP address connected to the socket
|
||||
* @example "192.168.1.100" | "2001:db8::1"
|
||||
*/
|
||||
readonly remoteAddress: string;
|
||||
|
||||
/**
|
||||
* Remote port connected to the socket
|
||||
* @example 8080
|
||||
*/
|
||||
readonly remotePort: number;
|
||||
|
||||
/**
|
||||
* IP protocol family used for the local endpoint of the socket
|
||||
* @example "IPv4" | "IPv6"
|
||||
*/
|
||||
readonly localFamily: "IPv4" | "IPv6";
|
||||
|
||||
/**
|
||||
* Local IP address connected to the socket
|
||||
* @example "192.168.1.100" | "2001:db8::1"
|
||||
*/
|
||||
readonly localAddress: string;
|
||||
|
||||
/**
|
||||
* Local port connected to the socket
|
||||
* @example 8080
|
||||
*/
|
||||
readonly localPort: number;
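Taken together, these read-only properties describe both ends of the connection; a typical use is logging them when a connection is accepted. A short sketch using `Bun.listen()` (the port is illustrative):

```ts
// Sketch: log both endpoints of an accepted connection.
Bun.listen({
  hostname: "0.0.0.0",
  port: 4000, // illustrative port
  socket: {
    open(socket) {
      console.log(
        `peer ${socket.remoteAddress}:${socket.remotePort} -> ` +
          `local ${socket.localAddress}:${socket.localPort} (${socket.localFamily})`,
      );
    },
    data(socket, data) {},
  },
});
```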
|
||||
|
||||
@@ -6156,6 +6235,8 @@ declare module "bun" {
|
||||
/**
|
||||
* See `Session Resumption` for more information.
|
||||
* @return `true` if the session was reused, `false` otherwise.
|
||||
* **TLS Only:** Checks if the current TLS session was resumed from a previous session.
|
||||
* Returns `true` if the session was resumed, `false` otherwise.
|
||||
*/
|
||||
isSessionReused(): boolean;
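In practice this is only meaningful after the TLS handshake has completed, for example inside the `open` handler of a TLS connection. A hedged sketch; the `tls: true` shorthand and the handler shape are assumptions about `Bun.connect`'s options, not confirmed by this diff.

```ts
// Sketch: check whether the TLS session was resumed once the connection is open.
await Bun.connect({
  hostname: "example.com",
  port: 443,
  tls: true, // assumed shorthand for default TLS options
  socket: {
    open(socket) {
      console.log("session reused:", socket.isSessionReused());
    },
    data(socket, data) {},
  },
});
```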
|
||||
|
||||
@@ -6198,30 +6279,91 @@ declare module "bun" {
|
||||
setKeepAlive(enable?: boolean, initialDelay?: number): boolean;
|
||||
|
||||
/**
|
||||
* The number of bytes written to the socket.
|
||||
* The total number of bytes successfully written to the socket since it was established.
|
||||
* This includes data currently buffered by the OS but not yet acknowledged by the remote peer.
|
||||
*/
|
||||
readonly bytesWritten: number;
|
||||
|
||||
/**
|
||||
* Alias for `socket.end()`. Allows the socket to be used with `using` declarations
|
||||
* for automatic resource management.
|
||||
* @example
|
||||
* ```ts
|
||||
* async function processSocket() {
|
||||
* using socket = await Bun.connect({ ... });
|
||||
* socket.write("Data");
|
||||
* // socket.end() is called automatically when exiting the scope
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
[Symbol.dispose](): void;
|
||||
|
||||
resume(): void;
|
||||
|
||||
pause(): void;
|
||||
|
||||
/**
|
||||
* Initiates TLS renegotiation. Only available if this is a TLS socket.
|
||||
*/
|
||||
renegotiate(): void;
|
||||
|
||||
/**
|
||||
* Sets the verify mode of the socket.
|
||||
*
|
||||
* @param requestCert Whether to request a certificate.
|
||||
* @param rejectUnauthorized Whether to reject unauthorized certificates.
|
||||
*/
|
||||
setVerifyMode(requestCert: boolean, rejectUnauthorized: boolean): void;
|
||||
|
||||
getSession(): void;
|
||||
|
||||
/**
|
||||
* Sets the session of the socket.
|
||||
*
|
||||
* @param session The session to set.
|
||||
*/
|
||||
setSession(session: string | Buffer | BufferSource): void;
|
||||
|
||||
/**
|
||||
* Exports the keying material of the socket.
|
||||
*
|
||||
* @param length The length of the keying material to export.
|
||||
* @param label The label of the keying material to export.
|
||||
* @param context The context of the keying material to export.
|
||||
*/
|
||||
exportKeyingMaterial(length: number, label: string, context?: string | BufferSource): void;
|
||||
|
||||
/**
|
||||
* Upgrades the socket to a TLS socket.
|
||||
*
|
||||
* @param options The options for the upgrade.
|
||||
* @returns A tuple containing the raw socket and the TLS socket.
|
||||
* @see {@link TLSUpgradeOptions}
|
||||
*/
|
||||
upgradeTLS<Data>(options: TLSUpgradeOptions<Data>): [raw: Socket<Data>, tls: Socket<Data>];
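The tuple return means the caller keeps a handle to both the original plaintext socket and the new TLS wrapper, which is the shape STARTTLS-style protocols need. The sketch below is hypothetical: the exact fields of `TLSUpgradeOptions` (handlers under `socket`, TLS config under `tls`) are assumptions, not confirmed by this diff.

```ts
// Hypothetical sketch: upgrade an established plaintext socket to TLS (STARTTLS style).
function startTls(socket: import("bun").Socket) {
  const [raw, tls] = socket.upgradeTLS({
    // Assumed option shape: TLS configuration plus handlers for the secure socket.
    tls: { rejectUnauthorized: true },
    socket: {
      open(secure) {
        secure.write("EHLO example.com\r\n");
      },
      data(secure, data) {
        // handle decrypted application data here
      },
    },
  });
  return { raw, tls };
}
```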
|
||||
|
||||
/**
|
||||
* Closes the socket.
|
||||
*
|
||||
* This is a wrapper around `end()` and `shutdown()`.
|
||||
*
|
||||
* @see {@link end}
|
||||
* @see {@link shutdown}
|
||||
*/
|
||||
close(): void;
|
||||
|
||||
/**
|
||||
* Returns the servername of the socket.
|
||||
*
|
||||
* @see {@link setServername}
|
||||
*/
|
||||
getServername(): string;
|
||||
|
||||
/**
|
||||
* Sets the servername of the socket.
|
||||
*
|
||||
* @see {@link getServername}
|
||||
*/
|
||||
setServername(name: string): void;
|
||||
}
|
||||
|
||||
@@ -6709,7 +6851,7 @@ declare module "bun" {
|
||||
* incoming messages, and `subprocess.send` can send messages to the subprocess. Messages are serialized
|
||||
* using the JSC serialize API, which allows for the same types that `postMessage`/`structuredClone` supports.
|
||||
*
|
||||
* The subprocess can send and receive messages by using `process.send` and `process.on("message")`,
|
||||
* respectively. This is the same API as what Node.js exposes when `child_process.fork()` is used.
|
||||
*
|
||||
* Currently, this is only compatible with processes that are other `bun` instances.
|
||||
|
||||
@@ -11,7 +11,9 @@
|
||||
"files": [
|
||||
"./*.d.ts",
|
||||
"docs/**/*.md",
|
||||
"docs/*.md"
|
||||
"docs/*.md",
|
||||
"CLAUDE.md",
|
||||
"README.md"
|
||||
],
|
||||
"homepage": "https://bun.sh",
|
||||
"dependencies": {
|
||||
@@ -23,7 +25,7 @@
|
||||
"scripts": {
|
||||
"prebuild": "echo $(pwd)",
|
||||
"copy-docs": "rm -rf docs && cp -rL ../../docs/ ./docs && find ./docs -type f -name '*.md' -exec sed -i 's/\\$BUN_LATEST_VERSION/'\"${BUN_VERSION#bun-v}\"'/g' {} +",
|
||||
"build": "bun run copy-docs && bun scripts/build.ts",
|
||||
"build": "bun run copy-docs && cp ../../src/init/rule.md CLAUDE.md && bun scripts/build.ts",
|
||||
"test": "tsc",
|
||||
"fmt": "echo $(which biome) && biome format --write ."
|
||||
},
|
||||
|
||||
168 packages/bun-types/redis.d.ts vendored
@@ -50,6 +50,10 @@ declare module "bun" {
|
||||
enableAutoPipelining?: boolean;
|
||||
}
|
||||
|
||||
export namespace RedisClient {
|
||||
type KeyLike = string | ArrayBufferView | Blob;
|
||||
}
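The `RedisClient.KeyLike` alias is what lets the command methods below accept strings, typed arrays, or `Blob`s interchangeably as keys and values. A brief sketch of how the different key types line up; the no-argument constructor defaulting to a local Redis instance is an assumption, since this excerpt does not show the constructor's parameters.

```ts
import { RedisClient } from "bun";

// Assumed: with no arguments the client connects to a local Redis instance.
const client = new RedisClient();

// All of these satisfy RedisClient.KeyLike:
await client.set("plain-string-key", "value");
await client.set(new TextEncoder().encode("binary-key"), "value");
await client.set(new Blob(["blob-key"]), "value");
```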
|
||||
|
||||
export class RedisClient {
|
||||
/**
|
||||
* Creates a new Redis client
|
||||
@@ -112,14 +116,14 @@ declare module "bun" {
|
||||
* @param key The key to get
|
||||
* @returns Promise that resolves with the key's value as a string, or null if the key doesn't exist
|
||||
*/
|
||||
get(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
get(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key as a Uint8Array
|
||||
* @param key The key to get
|
||||
* @returns Promise that resolves with the key's value as a Uint8Array, or null if the key doesn't exist
|
||||
*/
|
||||
getBuffer(key: string | ArrayBufferView | Blob): Promise<Uint8Array<ArrayBuffer> | null>;
|
||||
getBuffer(key: RedisClient.KeyLike): Promise<Uint8Array<ArrayBuffer> | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value
|
||||
@@ -127,7 +131,7 @@ declare module "bun" {
|
||||
* @param value The value to set
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration
|
||||
@@ -136,12 +140,7 @@ declare module "bun" {
|
||||
* @param ex Set the specified expire time, in seconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
ex: "EX",
|
||||
seconds: number,
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, ex: "EX", seconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration
|
||||
@@ -150,12 +149,7 @@ declare module "bun" {
|
||||
* @param px Set the specified expire time, in milliseconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
px: "PX",
|
||||
milliseconds: number,
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, px: "PX", milliseconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration at a specific Unix timestamp
|
||||
@@ -164,12 +158,7 @@ declare module "bun" {
|
||||
* @param exat Set the specified Unix time at which the key will expire, in seconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
exat: "EXAT",
|
||||
timestampSeconds: number,
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, exat: "EXAT", timestampSeconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration at a specific Unix timestamp
|
||||
@@ -179,8 +168,8 @@ declare module "bun" {
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
key: RedisClient.KeyLike,
|
||||
value: RedisClient.KeyLike,
|
||||
pxat: "PXAT",
|
||||
timestampMilliseconds: number,
|
||||
): Promise<"OK">;
|
||||
@@ -192,7 +181,7 @@ declare module "bun" {
|
||||
* @param nx Only set the key if it does not already exist
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key already exists
|
||||
*/
|
||||
set(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob, nx: "NX"): Promise<"OK" | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, nx: "NX"): Promise<"OK" | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value only if key already exists
|
||||
@@ -201,7 +190,7 @@ declare module "bun" {
|
||||
* @param xx Only set the key if it already exists
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key does not exist
|
||||
*/
|
||||
set(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob, xx: "XX"): Promise<"OK" | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, xx: "XX"): Promise<"OK" | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value and return the old value
|
||||
@@ -210,11 +199,7 @@ declare module "bun" {
|
||||
* @param get Return the old string stored at key, or null if key did not exist
|
||||
* @returns Promise that resolves with the old value, or null if key did not exist
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
get: "GET",
|
||||
): Promise<string | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, get: "GET"): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value and retain the time to live
|
||||
@@ -223,11 +208,7 @@ declare module "bun" {
|
||||
* @param keepttl Retain the time to live associated with the key
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
keepttl: "KEEPTTL",
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, keepttl: "KEEPTTL"): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with various options
|
||||
@@ -236,39 +217,35 @@ declare module "bun" {
|
||||
* @param options Array of options (EX, PX, EXAT, PXAT, NX, XX, KEEPTTL, GET)
|
||||
* @returns Promise that resolves with "OK" on success, null if NX/XX condition not met, or the old value if GET is specified
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
...options: string[]
|
||||
): Promise<"OK" | string | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, ...options: string[]): Promise<"OK" | string | null>;
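The dedicated overloads above cover single flags (`EX`, `PX`, `EXAT`, `PXAT`, `NX`, `XX`, `GET`, `KEEPTTL`), while this final variadic overload is the escape hatch for combinations. A short sketch, assuming `client` is a connected `RedisClient`:

```ts
// Sketch: common SET variants, assuming `client` is a connected RedisClient.
await client.set("session:42", "payload", "EX", 60);              // expire in 60 seconds
await client.set("lock:job-1", "owner-a", "NX");                  // only if the key is absent
const previous = await client.set("mode", "fast", "GET");         // returns the old value, or null

// Combinations go through the variadic overload (raw option strings):
await client.set("lock:job-2", "owner-b", "NX", "EX", "30");
```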
|
||||
|
||||
/**
|
||||
* Delete a key
|
||||
* @param key The key to delete
|
||||
* Delete one or more keys
|
||||
* @param keys The keys to delete
|
||||
* @returns Promise that resolves with the number of keys removed
|
||||
*/
|
||||
del(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
del(...keys: RedisClient.KeyLike[]): Promise<number>;
|
||||
|
||||
/**
|
||||
* Increment the integer value of a key by one
|
||||
* @param key The key to increment
|
||||
* @returns Promise that resolves with the new value
|
||||
*/
|
||||
incr(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
incr(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Decrement the integer value of a key by one
|
||||
* @param key The key to decrement
|
||||
* @returns Promise that resolves with the new value
|
||||
*/
|
||||
decr(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
decr(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Determine if a key exists
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with true if the key exists, false otherwise
|
||||
*/
|
||||
exists(key: string | ArrayBufferView | Blob): Promise<boolean>;
|
||||
exists(key: RedisClient.KeyLike): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Set a key's time to live in seconds
|
||||
@@ -276,14 +253,14 @@ declare module "bun" {
|
||||
* @param seconds The number of seconds until expiration
|
||||
* @returns Promise that resolves with 1 if the timeout was set, 0 if not
|
||||
*/
|
||||
expire(key: string | ArrayBufferView | Blob, seconds: number): Promise<number>;
|
||||
expire(key: RedisClient.KeyLike, seconds: number): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the time to live for a key in seconds
|
||||
* @param key The key to get the TTL for
|
||||
* @returns Promise that resolves with the TTL, -1 if no expiry, or -2 if key doesn't exist
|
||||
*/
|
||||
ttl(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
ttl(key: RedisClient.KeyLike): Promise<number>;
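The return conventions matter here: `expire` resolves with `1` or `0` depending on whether a timeout was set, and `ttl` uses `-1` (no expiry) and `-2` (missing key) as sentinels. A short sketch, again assuming `client` is a connected `RedisClient`:

```ts
// Sketch: set a TTL and interpret the sentinel values from ttl().
await client.set("cache:user:1", "{}");
await client.expire("cache:user:1", 120); // resolves with 1 if the timeout was set

const remaining = await client.ttl("cache:user:1");
if (remaining === -2) {
  console.log("key does not exist");
} else if (remaining === -1) {
  console.log("key has no expiry");
} else {
  console.log(`expires in ${remaining}s`);
}
```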
|
||||
|
||||
/**
|
||||
* Set multiple hash fields to multiple values
|
||||
@@ -291,7 +268,7 @@ declare module "bun" {
|
||||
* @param fieldValues An array of alternating field names and values
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
hmset(key: string | ArrayBufferView | Blob, fieldValues: string[]): Promise<string>;
|
||||
hmset(key: RedisClient.KeyLike, fieldValues: string[]): Promise<string>;
|
||||
|
||||
/**
|
||||
* Get the values of all the given hash fields
|
||||
@@ -299,7 +276,7 @@ declare module "bun" {
|
||||
* @param fields The fields to get
|
||||
* @returns Promise that resolves with an array of values
|
||||
*/
|
||||
hmget(key: string | ArrayBufferView | Blob, fields: string[]): Promise<Array<string | null>>;
|
||||
hmget(key: RedisClient.KeyLike, fields: string[]): Promise<Array<string | null>>;
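Note the flattened argument shape: `hmset` takes a single array of alternating field names and values rather than an object, and `hmget` returns values in the same order as the requested fields, with `null` for missing fields. A sketch assuming `client` is a connected `RedisClient`:

```ts
// Sketch: hash writes/reads use flat arrays, not objects.
await client.hmset("user:1", ["name", "Ada", "lang", "en"]);

const [name, lang, missing] = await client.hmget("user:1", ["name", "lang", "nope"]);
console.log(name, lang, missing); // "Ada" "en" null
```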
|
||||
|
||||
/**
|
||||
* Check if a value is a member of a set
|
||||
@@ -307,7 +284,7 @@ declare module "bun" {
|
||||
* @param member The member to check
|
||||
* @returns Promise that resolves with true if the member exists, false otherwise
|
||||
*/
|
||||
sismember(key: string | ArrayBufferView | Blob, member: string): Promise<boolean>;
|
||||
sismember(key: RedisClient.KeyLike, member: string): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Add a member to a set
|
||||
@@ -315,7 +292,7 @@ declare module "bun" {
|
||||
* @param member The member to add
|
||||
* @returns Promise that resolves with 1 if the member was added, 0 if it already existed
|
||||
*/
|
||||
sadd(key: string | ArrayBufferView | Blob, member: string): Promise<number>;
|
||||
sadd(key: RedisClient.KeyLike, member: string): Promise<number>;
|
||||
|
||||
/**
|
||||
* Remove a member from a set
|
||||
@@ -323,28 +300,28 @@ declare module "bun" {
|
||||
* @param member The member to remove
|
||||
* @returns Promise that resolves with 1 if the member was removed, 0 if it didn't exist
|
||||
*/
|
||||
srem(key: string | ArrayBufferView | Blob, member: string): Promise<number>;
|
||||
srem(key: RedisClient.KeyLike, member: string): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get all the members in a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with an array of all members
|
||||
*/
|
||||
smembers(key: string | ArrayBufferView | Blob): Promise<string[]>;
|
||||
smembers(key: RedisClient.KeyLike): Promise<string[]>;
|
||||
|
||||
/**
|
||||
* Get a random member from a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with a random member, or null if the set is empty
|
||||
*/
|
||||
srandmember(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
srandmember(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Remove and return a random member from a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with the removed member, or null if the set is empty
|
||||
*/
|
||||
spop(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
spop(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Increment the integer value of a hash field by the given number
|
||||
@@ -353,7 +330,7 @@ declare module "bun" {
|
||||
* @param increment The amount to increment by
|
||||
* @returns Promise that resolves with the new value
|
||||
*/
|
||||
hincrby(key: string | ArrayBufferView | Blob, field: string, increment: string | number): Promise<number>;
|
||||
hincrby(key: RedisClient.KeyLike, field: string, increment: string | number): Promise<number>;
|
||||
|
||||
/**
|
||||
* Increment the float value of a hash field by the given amount
|
||||
@@ -362,35 +339,35 @@ declare module "bun" {
|
||||
* @param increment The amount to increment by
|
||||
* @returns Promise that resolves with the new value as a string
|
||||
*/
|
||||
hincrbyfloat(key: string | ArrayBufferView | Blob, field: string, increment: string | number): Promise<string>;
|
||||
hincrbyfloat(key: RedisClient.KeyLike, field: string, increment: string | number): Promise<string>;
|
||||
|
||||
/**
|
||||
* Get all the fields and values in a hash
|
||||
* @param key The hash key
|
||||
* @returns Promise that resolves with an object containing all fields and values
|
||||
*/
|
||||
hgetall(key: string | ArrayBufferView | Blob): Promise<Record<string, string> | null>;
|
||||
hgetall(key: RedisClient.KeyLike): Promise<Record<string, string> | null>;
|
||||
|
||||
/**
|
||||
* Get all field names in a hash
|
||||
* @param key The hash key
|
||||
* @returns Promise that resolves with an array of field names
|
||||
*/
|
||||
hkeys(key: string | ArrayBufferView | Blob): Promise<string[]>;
|
||||
hkeys(key: RedisClient.KeyLike): Promise<string[]>;
|
||||
|
||||
/**
|
||||
* Get the number of fields in a hash
|
||||
* @param key The hash key
|
||||
* @returns Promise that resolves with the number of fields
|
||||
*/
|
||||
hlen(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
hlen(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get all values in a hash
|
||||
* @param key The hash key
|
||||
* @returns Promise that resolves with an array of values
|
||||
*/
|
||||
hvals(key: string | ArrayBufferView | Blob): Promise<string[]>;
|
||||
hvals(key: RedisClient.KeyLike): Promise<string[]>;
|
||||
|
||||
/**
|
||||
* Find all keys matching the given pattern
|
||||
@@ -404,84 +381,84 @@ declare module "bun" {
|
||||
* @param key The list key
|
||||
* @returns Promise that resolves with the length of the list
|
||||
*/
|
||||
llen(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
llen(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Remove and get the first element in a list
|
||||
* @param key The list key
|
||||
* @returns Promise that resolves with the first element, or null if the list is empty
|
||||
*/
|
||||
lpop(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
lpop(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Remove the expiration from a key
|
||||
* @param key The key to persist
|
||||
* @returns Promise that resolves with 1 if the timeout was removed, 0 if the key doesn't exist or has no timeout
|
||||
*/
|
||||
persist(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
persist(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the expiration time of a key as a UNIX timestamp in milliseconds
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the timestamp, or -1 if the key has no expiration, or -2 if the key doesn't exist
|
||||
*/
|
||||
pexpiretime(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
pexpiretime(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the time to live for a key in milliseconds
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the TTL in milliseconds, or -1 if the key has no expiration, or -2 if the key doesn't exist
|
||||
*/
|
||||
pttl(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
pttl(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Remove and get the last element in a list
|
||||
* @param key The list key
|
||||
* @returns Promise that resolves with the last element, or null if the list is empty
|
||||
*/
|
||||
rpop(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
rpop(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the number of members in a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with the cardinality (number of elements) of the set
|
||||
*/
|
||||
scard(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
scard(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the length of the value stored in a key
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the length of the string value, or 0 if the key doesn't exist
|
||||
*/
|
||||
strlen(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
strlen(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the number of members in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with the cardinality (number of elements) of the sorted set
|
||||
*/
|
||||
zcard(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
zcard(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Remove and return members with the highest scores in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with the removed member and its score, or null if the set is empty
|
||||
*/
|
||||
zpopmax(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
zpopmax(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Remove and return members with the lowest scores in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with the removed member and its score, or null if the set is empty
|
||||
*/
|
||||
zpopmin(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
zpopmin(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get one or multiple random members from a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with a random member, or null if the set is empty
|
||||
*/
|
||||
zrandmember(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
zrandmember(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Append a value to a key
|
||||
@@ -489,7 +466,7 @@ declare module "bun" {
|
||||
* @param value The value to append
|
||||
* @returns Promise that resolves with the length of the string after the append operation
|
||||
*/
|
||||
append(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
|
||||
append(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Set the value of a key and return its old value
|
||||
@@ -497,7 +474,7 @@ declare module "bun" {
|
||||
* @param value The value to set
|
||||
* @returns Promise that resolves with the old value, or null if the key didn't exist
|
||||
*/
|
||||
getset(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
getset(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Prepend one or multiple values to a list
|
||||
@@ -505,7 +482,7 @@ declare module "bun" {
|
||||
* @param value The value to prepend
|
||||
* @returns Promise that resolves with the length of the list after the push operation
|
||||
*/
|
||||
lpush(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
|
||||
lpush(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Prepend a value to a list, only if the list exists
|
||||
@@ -513,7 +490,7 @@ declare module "bun" {
|
||||
* @param value The value to prepend
|
||||
* @returns Promise that resolves with the length of the list after the push operation, or 0 if the list doesn't exist
|
||||
*/
|
||||
lpushx(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
|
||||
lpushx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Add one or more members to a HyperLogLog
|
||||
@@ -521,7 +498,7 @@ declare module "bun" {
|
||||
* @param element The element to add
|
||||
* @returns Promise that resolves with 1 if the HyperLogLog was altered, 0 otherwise
|
||||
*/
|
||||
pfadd(key: string | ArrayBufferView | Blob, element: string): Promise<number>;
|
||||
pfadd(key: RedisClient.KeyLike, element: string): Promise<number>;
|
||||
|
||||
/**
|
||||
* Append one or multiple values to a list
|
||||
@@ -529,7 +506,7 @@ declare module "bun" {
|
||||
* @param value The value to append
|
||||
* @returns Promise that resolves with the length of the list after the push operation
|
||||
*/
|
||||
rpush(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
|
||||
rpush(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Append a value to a list, only if the list exists
|
||||
@@ -537,7 +514,7 @@ declare module "bun" {
|
||||
* @param value The value to append
|
||||
* @returns Promise that resolves with the length of the list after the push operation, or 0 if the list doesn't exist
|
||||
*/
|
||||
rpushx(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
|
||||
rpushx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Set the value of a key, only if the key does not exist
|
||||
@@ -545,7 +522,7 @@ declare module "bun" {
|
||||
* @param value The value to set
|
||||
* @returns Promise that resolves with 1 if the key was set, 0 if the key was not set
|
||||
*/
|
||||
setnx(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
|
||||
setnx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the score associated with the given member in a sorted set
|
||||
@@ -553,49 +530,62 @@ declare module "bun" {
|
||||
* @param member The member to get the score for
|
||||
* @returns Promise that resolves with the score of the member as a string, or null if the member or key doesn't exist
|
||||
*/
|
||||
zscore(key: string | ArrayBufferView | Blob, member: string): Promise<string | null>;
|
||||
zscore(key: RedisClient.KeyLike, member: string): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the values of all specified keys
|
||||
* @param keys The keys to get
|
||||
* @returns Promise that resolves with an array of values, with null for keys that don't exist
|
||||
*/
|
||||
mget(...keys: (string | ArrayBufferView | Blob)[]): Promise<(string | null)[]>;
|
||||
mget(...keys: RedisClient.KeyLike[]): Promise<(string | null)[]>;
|
||||
|
||||
/**
|
||||
* Count the number of set bits (population counting) in a string
|
||||
* @param key The key to count bits in
|
||||
* @returns Promise that resolves with the number of bits set to 1
|
||||
*/
|
||||
bitcount(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
bitcount(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Return a serialized version of the value stored at the specified key
|
||||
* @param key The key to dump
|
||||
* @returns Promise that resolves with the serialized value, or null if the key doesn't exist
|
||||
*/
|
||||
dump(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
dump(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the expiration time of a key as a UNIX timestamp in seconds
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the timestamp, or -1 if the key has no expiration, or -2 if the key doesn't exist
|
||||
*/
|
||||
expiretime(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
expiretime(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and delete the key
|
||||
* @param key The key to get and delete
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getdel(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
getdel(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and optionally set its expiration
|
||||
* @param key The key to get
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
getex(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Ping the server
|
||||
* @returns Promise that resolves with "PONG" if the server is reachable, or throws an error if the server is not reachable
|
||||
*/
|
||||
ping(): Promise<"PONG">;
|
||||
|
||||
/**
|
||||
* Ping the server with a message
|
||||
* @param message The message to send to the server
|
||||
* @returns Promise that resolves with the message if the server is reachable, or throws an error if the server is not reachable
|
||||
*/
|
||||
ping(message: RedisClient.KeyLike): Promise<string>;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -4,4 +4,16 @@ import pkg from "../package.json";
|
||||
|
||||
const BUN_VERSION = (process.env.BUN_VERSION || Bun.version || process.versions.bun).replace(/^.*v/, "");
|
||||
|
||||
Bun.write(join(import.meta.dir, "..", "package.json"), JSON.stringify({ version: BUN_VERSION, ...pkg }, null, 2));
|
||||
let claude = Bun.file(join(import.meta.dir, "..", "CLAUDE.md"));
|
||||
if (await claude.exists()) {
|
||||
let original = await claude.text();
|
||||
const endOfFrontMatter = original.lastIndexOf("---\n");
|
||||
original = original.replaceAll("node_modules/bun-types/", "");
|
||||
if (endOfFrontMatter > -1) {
|
||||
original = original.slice(endOfFrontMatter + "---\n".length).trim() + "\n";
|
||||
}
|
||||
|
||||
await claude.write(original);
|
||||
}
|
||||
|
||||
await Bun.write(join(import.meta.dir, "..", "package.json"), JSON.stringify({ version: BUN_VERSION, ...pkg }, null, 2));
|
||||
|
||||
79 packages/bun-types/sqlite.d.ts vendored
@@ -764,6 +764,79 @@ declare module "bun:sqlite" {
|
||||
*/
|
||||
readonly paramsCount: number;
|
||||
|
||||
/**
|
||||
* The actual SQLite column types from the first row of the result set.
|
||||
* Useful for expressions and computed columns, which are not covered by `declaredTypes`
|
||||
*
|
||||
* Returns an array of SQLite type constants as uppercase strings:
|
||||
* - `"INTEGER"` for integer values
|
||||
* - `"FLOAT"` for floating-point values
|
||||
* - `"TEXT"` for text values
|
||||
* - `"BLOB"` for binary data
|
||||
* - `"NULL"` for null values
|
||||
* - `null` for unknown/unsupported types
|
||||
*
|
||||
* **Requirements:**
|
||||
* - Only available for read-only statements (SELECT queries)
|
||||
* - For non-read-only statements, throws an error
|
||||
*
|
||||
* **Behavior:**
|
||||
* - Uses `sqlite3_column_type()` to get actual data types from the first row
|
||||
* - Returns `null` for columns with unknown SQLite type constants
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const stmt = db.prepare("SELECT id, name, age FROM users WHERE id = 1");
|
||||
*
|
||||
* console.log(stmt.columnTypes);
|
||||
* // => ["INTEGER", "TEXT", "INTEGER"]
|
||||
*
|
||||
* // For expressions:
|
||||
* const exprStmt = db.prepare("SELECT length('bun') AS str_length");
|
||||
* console.log(exprStmt.columnTypes);
|
||||
* // => ["INTEGER"]
|
||||
* ```
|
||||
*
|
||||
* @throws Error if statement is not read-only (INSERT, UPDATE, DELETE, etc.)
|
||||
* @since Bun v1.2.13
|
||||
*/
|
||||
readonly columnTypes: Array<"INTEGER" | "FLOAT" | "TEXT" | "BLOB" | "NULL" | null>;
|
||||
|
||||
/**
|
||||
* The declared column types from the table schema.
|
||||
*
|
||||
* Returns an array of declared type strings from `sqlite3_column_decltype()`:
|
||||
* - Raw type strings as declared in the CREATE TABLE statement
|
||||
* - `null` for columns without declared types (e.g., expressions, computed columns)
|
||||
*
|
||||
* **Requirements:**
|
||||
* - Statement must be executed at least once before accessing this property
|
||||
* - Available for both read-only and read-write statements
|
||||
*
|
||||
* **Behavior:**
|
||||
* - Uses `sqlite3_column_decltype()` to get schema-declared types
|
||||
* - Returns the exact type string from the table definition
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // For table columns:
|
||||
* const stmt = db.prepare("SELECT id, name, weight FROM products");
|
||||
* stmt.get();
|
||||
* console.log(stmt.declaredTypes);
|
||||
* // => ["INTEGER", "TEXT", "REAL"]
|
||||
*
|
||||
* // For expressions (no declared types):
|
||||
* const exprStmt = db.prepare("SELECT length('bun') AS str_length");
|
||||
* exprStmt.get();
|
||||
* console.log(exprStmt.declaredTypes);
|
||||
* // => [null]
|
||||
* ```
|
||||
*
|
||||
* @throws Error if statement hasn't been executed
|
||||
* @since Bun v1.2.13
|
||||
*/
|
||||
readonly declaredTypes: Array<string | null>;
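The two properties answer different questions: `declaredTypes` reflects the schema as written, while `columnTypes` reflects what the first result row actually contains, which is what distinguishes them for expressions and for columns holding NULLs. A small sketch contrasting the two; the table name and data are illustrative, and the commented outputs show expected values under those assumptions.

```ts
import { Database } from "bun:sqlite";

// Sketch: contrast schema-declared types with the types of the first result row.
const db = new Database(":memory:");
db.run("CREATE TABLE products (id INTEGER, name TEXT, weight REAL)");
db.run("INSERT INTO products VALUES (1, 'widget', NULL)");

const stmt = db.prepare("SELECT id, name, weight, length(name) AS name_len FROM products");
stmt.get(); // declaredTypes requires the statement to have run at least once

console.log(stmt.declaredTypes); // e.g. ["INTEGER", "TEXT", "REAL", null] (expression has no declared type)
console.log(stmt.columnTypes);   // e.g. ["INTEGER", "TEXT", "NULL", "INTEGER"] (actual first-row types)
```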
|
||||
|
||||
/**
|
||||
* Finalize the prepared statement, freeing the resources used by the
|
||||
* statement and preventing it from being executed again.
|
||||
@@ -840,6 +913,12 @@ declare module "bun:sqlite" {
|
||||
* Native object representing the underlying `sqlite3_stmt`
|
||||
*
|
||||
* This is left untyped because the ABI of the native bindings may change at any time.
|
||||
*
|
||||
* For stable, typed access to statement metadata, use the typed properties on the Statement class:
|
||||
* - {@link columnNames} for column names
|
||||
* - {@link paramsCount} for parameter count
|
||||
* - {@link columnTypes} for actual data types from the first row
|
||||
* - {@link declaredTypes} for schema-declared column types
|
||||
*/
|
||||
readonly native: any;
|
||||
}
|
||||
|
||||
20 packages/bun-types/test.d.ts vendored
@@ -88,15 +88,19 @@ declare module "bun:test" {
|
||||
*/
|
||||
export function setSystemTime(now?: Date | number): ThisType<void>;
|
||||
|
||||
interface Jest {
|
||||
restoreAllMocks(): void;
|
||||
clearAllMocks(): void;
|
||||
fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
|
||||
setSystemTime(now?: number | Date): void;
|
||||
setTimeout(milliseconds: number): void;
|
||||
}
|
||||
export const jest: Jest;
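Because `jest` is exposed both as a value (`const jest: Jest`) and as a namespace of functions, the timer helpers declared below can be called directly in a `bun:test` file. A short sketch:

```ts
import { test, expect, jest } from "bun:test";

// Sketch: freeze time for a test, then restore the real clock.
test("uses a fixed system time", () => {
  jest.useFakeTimers();
  jest.setSystemTime(new Date("2024-01-01T00:00:00Z"));

  expect(new Date().getFullYear()).toBe(2024);

  jest.useRealTimers();
});
```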
|
||||
export namespace jest {
|
||||
function restoreAllMocks(): void;
|
||||
function clearAllMocks(): void;
|
||||
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
|
||||
function setSystemTime(now?: number | Date): void;
|
||||
function setTimeout(milliseconds: number): void;
|
||||
function useFakeTimers(): void;
|
||||
function useRealTimers(): void;
|
||||
function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<Extract<T[K], (...args: any[]) => any>>;
|
||||
|
||||
/**
|
||||
* Constructs the type of a mock function, e.g. the return type of `jest.fn()`.
|
||||
*/
|
||||
|
||||
@@ -504,7 +504,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
|
||||
}
|
||||
|
||||
struct addrinfo_request* ai_req;
|
||||
if (Bun__addrinfo_get(loop, host, &ai_req) == 0) {
|
||||
if (Bun__addrinfo_get(loop, host, (uint16_t)port, &ai_req) == 0) {
|
||||
// fast path for cached results
|
||||
struct addrinfo_result *result = Bun__addrinfo_getRequestResult(ai_req);
|
||||
// fast failure path
|
||||
|
||||
@@ -44,10 +44,7 @@ void *sni_find(void *sni, const char *hostname);
|
||||
#include <wolfssl/options.h>
|
||||
#endif
|
||||
|
||||
#include "./root_certs.h"
|
||||
|
||||
/* These are in root_certs.cpp */
|
||||
extern X509_STORE *us_get_default_ca_store();
|
||||
#include "./root_certs_header.h"
|
||||
|
||||
struct loop_ssl_data {
|
||||
char *ssl_read_input, *ssl_read_output;
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
// MSVC doesn't support C11 stdatomic.h propertly yet.
|
||||
// so we use C++ std::atomic instead.
|
||||
#include "./root_certs.h"
|
||||
#include "./root_certs_header.h"
|
||||
#include "./internal/internal.h"
|
||||
#include <atomic>
|
||||
#include <openssl/pem.h>
|
||||
#include <openssl/x509.h>
|
||||
#include <string.h>
|
||||
static const int root_certs_size = sizeof(root_certs) / sizeof(root_certs[0]);
|
||||
|
||||
@@ -134,6 +133,23 @@ extern "C" int us_internal_raw_root_certs(struct us_cert_string_t **out) {
|
||||
return root_certs_size;
|
||||
}
|
||||
|
||||
struct us_default_ca_certificates {
|
||||
X509 *root_cert_instances[root_certs_size];
|
||||
STACK_OF(X509) *root_extra_cert_instances;
|
||||
};
|
||||
|
||||
us_default_ca_certificates* us_get_default_ca_certificates() {
|
||||
static us_default_ca_certificates default_ca_certificates = {{NULL}, NULL};
|
||||
|
||||
us_internal_init_root_certs(default_ca_certificates.root_cert_instances, default_ca_certificates.root_extra_cert_instances);
|
||||
|
||||
return &default_ca_certificates;
|
||||
}
|
||||
|
||||
STACK_OF(X509) *us_get_root_extra_cert_instances() {
|
||||
return us_get_default_ca_certificates()->root_extra_cert_instances;
|
||||
}
|
||||
|
||||
extern "C" X509_STORE *us_get_default_ca_store() {
|
||||
X509_STORE *store = X509_STORE_new();
|
||||
if (store == NULL) {
|
||||
@@ -145,10 +161,9 @@ extern "C" X509_STORE *us_get_default_ca_store() {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static X509 *root_cert_instances[root_certs_size] = {NULL};
|
||||
static STACK_OF(X509) *root_extra_cert_instances = NULL;
|
||||
|
||||
us_internal_init_root_certs(root_cert_instances, root_extra_cert_instances);
|
||||
us_default_ca_certificates *default_ca_certificates = us_get_default_ca_certificates();
|
||||
X509** root_cert_instances = default_ca_certificates->root_cert_instances;
|
||||
STACK_OF(X509) *root_extra_cert_instances = default_ca_certificates->root_extra_cert_instances;
|
||||
|
||||
// load all root_cert_instances on the default ca store
|
||||
for (size_t i = 0; i < root_certs_size; i++) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Maintaining the root certificates
|
||||
// Maintaining the root certificates
|
||||
//
|
||||
// `src/crypto/root_certs.h` contains a compiled-in set of root certificates used as trust anchors
|
||||
// for TLS certificate validation.
|
||||
@@ -23,7 +23,7 @@
|
||||
// `src/crypto/root_certs.h`.
|
||||
// * Using `git diff-files` to determine which certificate have been added and/or
|
||||
// removed.
|
||||
//
|
||||
//
|
||||
#include "libusockets.h"
|
||||
static struct us_cert_string_t root_certs[] = {
|
||||
|
||||
|
||||
13 packages/bun-usockets/src/crypto/root_certs_header.h Normal file
@@ -0,0 +1,13 @@
|
||||
#include <openssl/pem.h>
|
||||
#include <openssl/x509.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
#define CPPDECL extern "C"
|
||||
|
||||
STACK_OF(X509) *us_get_root_extra_cert_instances();
|
||||
|
||||
#else
|
||||
#define CPPDECL extern
|
||||
#endif
|
||||
|
||||
CPPDECL X509_STORE *us_get_default_ca_store();
|
||||
@@ -110,7 +110,7 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
|
||||
}
|
||||
|
||||
|
||||
#if defined(LIBUS_USE_EPOLL)
|
||||
#if defined(LIBUS_USE_EPOLL)
|
||||
|
||||
#include <sys/syscall.h>
|
||||
#include <signal.h>
|
||||
@@ -131,9 +131,9 @@ extern ssize_t sys_epoll_pwait2(int epfd, struct epoll_event* events, int maxeve
|
||||
|
||||
static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout) {
|
||||
int ret;
|
||||
sigset_t mask;
|
||||
sigset_t mask;
|
||||
sigemptyset(&mask);
|
||||
|
||||
|
||||
if (has_epoll_pwait2 != 0) {
|
||||
do {
|
||||
ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, &mask);
|
||||
@@ -146,7 +146,7 @@ static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents,
|
||||
has_epoll_pwait2 = 0;
|
||||
}
|
||||
|
||||
int timeoutMs = -1;
|
||||
int timeoutMs = -1;
|
||||
if (timeout) {
|
||||
timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
|
||||
}
|
||||
@@ -178,7 +178,7 @@ struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t
|
||||
if (has_epoll_pwait2 == -1) {
|
||||
if (Bun__isEpollPwait2SupportedOnLinuxKernel() == 0) {
|
||||
has_epoll_pwait2 = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#else
|
||||
@@ -358,16 +358,16 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
|
||||
if ((new_events & LIBUS_SOCKET_READABLE) != (old_events & LIBUS_SOCKET_READABLE)) {
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_READ, is_readable ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
|
||||
|
||||
if(!is_readable && !is_writable) {
|
||||
if(!(old_events & LIBUS_SOCKET_WRITABLE)) {
|
||||
// if we are not reading or writing, we need to add writable to receive FIN
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, EV_ADD, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
} else if ((new_events & LIBUS_SOCKET_WRITABLE) != (old_events & LIBUS_SOCKET_WRITABLE)) {
|
||||
/* Do they differ in writable? */
|
||||
/* Do they differ in writable? */
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
}
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
@@ -673,7 +673,7 @@ struct us_internal_async *us_internal_create_async(struct us_loop_t *loop, int f
|
||||
// using it for notifications and not for any other purpose.
|
||||
mach_port_limits_t limits = { .mpl_qlimit = 1 };
|
||||
kr = mach_port_set_attributes(self, cb->port, MACH_PORT_LIMITS_INFO, (mach_port_info_t)&limits, MACH_PORT_LIMITS_INFO_COUNT);
|
||||
|
||||
|
||||
if (UNLIKELY(kr != KERN_SUCCESS)) {
|
||||
return NULL;
|
||||
}
|
||||
@@ -688,7 +688,7 @@ void us_internal_async_close(struct us_internal_async *a) {
|
||||
struct kevent64_s event;
|
||||
uint64_t ptr = (uint64_t)(void*)internal_cb;
|
||||
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);
|
||||
|
||||
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
@@ -720,7 +720,7 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
|
||||
event.ext[1] = MACHPORT_BUF_LEN;
|
||||
event.udata = (uint64_t)(void*)internal_cb;
|
||||
|
||||
int ret;
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
@@ -750,12 +750,12 @@ void us_internal_async_wakeup(struct us_internal_async *a) {
|
||||
0, // Fail instantly if the port is full
|
||||
MACH_PORT_NULL
|
||||
);
|
||||
|
||||
|
||||
switch (kr) {
|
||||
case KERN_SUCCESS: {
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
// This means that the send would've blocked because the
|
||||
// queue is full. We assume success because the port is full.
|
||||
case MACH_SEND_TIMED_OUT: {
|
||||
|
||||
@@ -107,8 +107,8 @@ struct addrinfo_result {
|
||||
#define us_internal_ssl_socket_context_r struct us_internal_ssl_socket_context_t *nonnull_arg
|
||||
#define us_internal_ssl_socket_r struct us_internal_ssl_socket_t *nonnull_arg
|
||||
|
||||
extern int Bun__addrinfo_get(struct us_loop_t* loop, const char* host, struct addrinfo_request** ptr);
|
||||
extern int Bun__addrinfo_set(struct addrinfo_request* ptr, struct us_connecting_socket_t* socket);
|
||||
extern int Bun__addrinfo_get(struct us_loop_t* loop, const char* host, uint16_t port, struct addrinfo_request** ptr);
|
||||
extern int Bun__addrinfo_set(struct addrinfo_request* ptr, struct us_connecting_socket_t* socket);
|
||||
extern void Bun__addrinfo_freeRequest(struct addrinfo_request* addrinfo_req, int error);
|
||||
extern struct addrinfo_result *Bun__addrinfo_getRequestResult(struct addrinfo_request* addrinfo_req);
|
||||
|
||||
@@ -158,7 +158,7 @@ void us_internal_socket_after_open(us_socket_r s, int error);
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(us_internal_ssl_socket_r s, int code,
|
||||
void *reason);
|
||||
|
||||
|
||||
int us_internal_handle_dns_results(us_loop_r loop);
|
||||
|
||||
/* Sockets are polls */
|
||||
@@ -167,9 +167,9 @@ struct us_socket_flags {
|
||||
/* If true, the readable side is paused */
|
||||
bool is_paused: 1;
|
||||
/* Allow to stay alive after FIN/EOF */
|
||||
bool allow_half_open: 1;
|
||||
bool allow_half_open: 1;
|
||||
/* 0 = not in low-prio queue, 1 = is in low-prio queue, 2 = was in low-prio queue in this iteration */
|
||||
unsigned char low_prio_state: 2;
|
||||
unsigned char low_prio_state: 2;
|
||||
/* If true, the socket should be read using readmsg to support receiving file descriptors */
|
||||
bool is_ipc: 1;
|
||||
|
||||
@@ -299,7 +299,7 @@ struct us_socket_context_t {
|
||||
struct us_connecting_socket_t *(*on_connect_error)(struct us_connecting_socket_t *, int code);
|
||||
struct us_socket_t *(*on_socket_connect_error)(struct us_socket_t *, int code);
|
||||
int (*is_low_prio)(struct us_socket_t *);
|
||||
|
||||
|
||||
};
|
||||
|
||||
/* Internal SSL interface */
|
||||
@@ -310,7 +310,7 @@ struct us_internal_ssl_socket_t;
|
||||
typedef void (*us_internal_on_handshake_t)(
|
||||
struct us_internal_ssl_socket_t *, int success,
|
||||
struct us_bun_verify_error_t verify_error, void *custom_data);
|
||||
|
||||
|
||||
void us_internal_socket_context_free(int ssl, struct us_socket_context_t *context);
|
||||
/* SNI functions */
|
||||
void us_internal_ssl_socket_context_add_server_name(
|
||||
|
||||
@@ -128,7 +128,7 @@ void us_internal_timer_sweep(struct us_loop_t *loop) {
|
||||
if (context->iterator == s && long_ticks == s->long_timeout) {
|
||||
s->long_timeout = 255;
|
||||
if (context->on_socket_long_timeout != NULL) context->on_socket_long_timeout(s);
|
||||
}
|
||||
}
|
||||
|
||||
/* Check for unlink / link (if the event handler did not modify the chain, we step 1) */
|
||||
if (s == context->iterator) {
|
||||
@@ -398,20 +398,20 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
struct msghdr msg = {0};
|
||||
struct iovec iov = {0};
|
||||
char cmsg_buf[CMSG_SPACE(sizeof(int))];
|
||||
|
||||
|
||||
iov.iov_base = loop->data.recv_buf + LIBUS_RECV_BUFFER_PADDING;
|
||||
iov.iov_len = LIBUS_RECV_BUFFER_LENGTH;
|
||||
|
||||
msg.msg_flags = 0;
|
||||
msg.msg_flags = 0;
|
||||
msg.msg_iov = &iov;
|
||||
msg.msg_iovlen = 1;
|
||||
msg.msg_name = NULL;
|
||||
msg.msg_namelen = 0;
|
||||
msg.msg_controllen = CMSG_LEN(sizeof(int));
|
||||
msg.msg_control = cmsg_buf;
|
||||
|
||||
|
||||
length = bsd_recvmsg(us_poll_fd(&s->p), &msg, recv_flags);
|
||||
|
||||
|
||||
// Extract file descriptor if present
|
||||
if (length > 0 && msg.msg_controllen > 0) {
|
||||
struct cmsghdr *cmsg_ptr = CMSG_FIRSTHDR(&msg);
|
||||
@@ -439,14 +439,14 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
// - the event loop isn't very busy, so we can read multiple times in a row
|
||||
#define LOOP_ISNT_VERY_BUSY_THRESHOLD 25
|
||||
if (
|
||||
s && length >= (LIBUS_RECV_BUFFER_LENGTH - 24 * 1024) && length <= LIBUS_RECV_BUFFER_LENGTH &&
|
||||
(error || loop->num_ready_polls < LOOP_ISNT_VERY_BUSY_THRESHOLD) &&
|
||||
s && length >= (LIBUS_RECV_BUFFER_LENGTH - 24 * 1024) && length <= LIBUS_RECV_BUFFER_LENGTH &&
|
||||
(error || loop->num_ready_polls < LOOP_ISNT_VERY_BUSY_THRESHOLD) &&
|
||||
!us_socket_is_closed(0, s)
|
||||
) {
|
||||
repeat_recv_count += error == 0;
|
||||
|
||||
// When not hung up, read a maximum of 10 times to avoid starving other sockets
|
||||
// We don't bother with ioctl(FIONREAD) because we've set MSG_DONTWAIT
|
||||
// We don't bother with ioctl(FIONREAD) because we've set MSG_DONTWAIT
|
||||
if (!(repeat_recv_count > 10 && loop->num_ready_polls > 2)) {
|
||||
continue;
|
||||
}
|
||||
@@ -486,7 +486,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
s = us_socket_close(0, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, NULL);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
/* Such as epollerr or EV_ERROR */
|
||||
if (error && s) {
|
||||
/* Todo: decide what code we give here */
|
||||
|
||||
@@ -387,29 +387,29 @@ int us_socket_ipc_write_fd(struct us_socket_t *s, const char* data, int length,
|
||||
struct msghdr msg = {0};
|
||||
struct iovec iov = {0};
|
||||
char cmsgbuf[CMSG_SPACE(sizeof(int))];
|
||||
|
||||
|
||||
iov.iov_base = (void*)data;
|
||||
iov.iov_len = length;
|
||||
|
||||
|
||||
msg.msg_iov = &iov;
|
||||
msg.msg_iovlen = 1;
|
||||
msg.msg_control = cmsgbuf;
|
||||
msg.msg_controllen = CMSG_SPACE(sizeof(int));
|
||||
|
||||
|
||||
struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
|
||||
cmsg->cmsg_level = SOL_SOCKET;
|
||||
cmsg->cmsg_type = SCM_RIGHTS;
|
||||
cmsg->cmsg_len = CMSG_LEN(sizeof(int));
|
||||
|
||||
|
||||
*(int *)CMSG_DATA(cmsg) = fd;
|
||||
|
||||
|
||||
int sent = bsd_sendmsg(us_poll_fd(&s->p), &msg, 0);
|
||||
|
||||
|
||||
if (sent != length) {
|
||||
s->context->loop->data.last_write_failed = 1;
|
||||
us_poll_change(&s->p, s->context->loop, LIBUS_SOCKET_READABLE | LIBUS_SOCKET_WRITABLE);
|
||||
}
|
||||
|
||||
|
||||
return sent < 0 ? 0 : sent;
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -249,7 +249,7 @@ public:
|
||||
}
|
||||
|
||||
static TemplatedApp<SSL>* create(SocketContextOptions options = {}) {
|
||||
|
||||
|
||||
auto* httpContext = HttpContext<SSL>::create(Loop::get(), options);
|
||||
if (!httpContext) {
|
||||
return nullptr;
|
||||
@@ -646,4 +646,3 @@ typedef TemplatedApp<false> App;
|
||||
typedef TemplatedApp<true> SSLApp;
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -224,7 +224,7 @@ public:
|
||||
|
||||
/**
|
||||
* Flushes the socket buffer by writing as much data as possible to the underlying socket.
|
||||
*
|
||||
*
|
||||
* @return The total number of bytes successfully written to the socket
|
||||
*/
|
||||
size_t flush() {
|
||||
@@ -237,30 +237,30 @@ public:
|
||||
/* Get the associated asynchronous socket data structure */
|
||||
AsyncSocketData<SSL> *asyncSocketData = getAsyncSocketData();
|
||||
size_t total_written = 0;
|
||||
|
||||
|
||||
/* Continue flushing as long as we have data in the buffer */
|
||||
while (asyncSocketData->buffer.length()) {
|
||||
/* Get current buffer size */
|
||||
size_t buffer_len = asyncSocketData->buffer.length();
|
||||
|
||||
|
||||
/* Limit write size to INT_MAX as the underlying socket API uses int for length */
|
||||
int max_flush_len = std::min(buffer_len, (size_t)INT_MAX);
|
||||
|
||||
/* Attempt to write data to the socket */
|
||||
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len, 0);
|
||||
total_written += written;
|
||||
|
||||
|
||||
/* Check if we couldn't write the entire buffer */
|
||||
if ((unsigned int) written < buffer_len) {
|
||||
/* Remove the successfully written data from the buffer */
|
||||
asyncSocketData->buffer.erase((unsigned int) written);
|
||||
|
||||
|
||||
/* If we wrote less than we attempted, the socket buffer is likely full
|
||||
* likely is used as an optimization hint to the compiler
|
||||
* since written < buffer_len is very likely to be true
|
||||
*/
|
||||
if(written < max_flush_len) {
|
||||
[[likely]]
|
||||
[[likely]]
|
||||
/* Cannot write more at this time, return what we've written so far */
|
||||
return total_written;
|
||||
}
|
||||
@@ -317,7 +317,7 @@ public:
|
||||
asyncSocketData->buffer.clear();
|
||||
}
|
||||
|
||||
if (length) {
|
||||
if (length) {
|
||||
if (loopData->isCorkedWith(this)) {
|
||||
/* We are corked */
|
||||
if (LoopData::CORK_BUFFER_SIZE - loopData->getCorkOffset() >= (unsigned int) length) {
|
||||
|
||||
@@ -52,7 +52,7 @@ public:
|
||||
if (key.length() < 2) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
ScrambleArea s = getFeatures(key);
|
||||
s.val = perfectHash(s.val);
|
||||
return filter[s.p[0]] &&
|
||||
|
||||
@@ -76,7 +76,7 @@ namespace uWS {
|
||||
data.remove_prefix(1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
auto len = data.length();
|
||||
if(len) {
|
||||
// consume extension
|
||||
@@ -93,20 +93,20 @@ namespace uWS {
            /* RFC 9110: Token format (TLDR; anything below 32 is not allowed)
             * TODO: add support for quoted-strings values (RFC 9110: 3.2.6. Quoted-String)
             * Example of chunked encoding with extensions:
             *
             * 4;key=value\r\n
             * Wiki\r\n
             * 5;foo=bar;baz=quux\r\n
             * pedia\r\n
             * 0\r\n
             * \r\n
             *
             * The chunk size is in hex (4, 5, 0), followed by optional
             * semicolon-separated extensions. Extensions consist of a key
             * (token) and optional value. The value may be a token or a
             * quoted string. The chunk data follows the CRLF after the
             * extensions and must be exactly the size specified.
             *
             * RFC 7230 Section 4.1.1 defines chunk extensions as:
             * chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
             * chunk-ext-name = token
@@ -116,7 +116,7 @@ namespace uWS {
                    state = STATE_IS_ERROR;
                    return;
                }

                data.remove_prefix(1);
            }
        }
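The comment above spells out the RFC 7230 chunk-extension grammar that may follow the hex chunk size. As an illustration of consuming such an extension run up to the CRLF that precedes the chunk data, here is a hypothetical helper; it is not the uWS ChunkedEncoding parser and it does not implement quoted-string values.

#include <string_view>

/* Skip everything between the hex chunk size and the CRLF that starts the
 * chunk data, rejecting control characters (RFC 9110 tokens exclude bytes
 * below 0x20). Returns the number of bytes consumed, -1 on error, or -2 if
 * more input is needed before a decision can be made. */
long skipChunkExtensions(std::string_view data) {
    for (size_t i = 0; i < data.size(); i++) {
        unsigned char c = (unsigned char) data[i];
        if (c == '\r') {
            if (i + 1 >= data.size()) return -2; /* need one more byte to see \n */
            return data[i + 1] == '\n' ? (long) (i + 2) : -1;
        }
        if (c < 0x20) {
            return -1; /* control bytes are not allowed in extensions */
        }
    }
    return -2; /* CRLF not seen yet, wait for more data */
}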
@@ -17,5 +17,5 @@ namespace uWS {
|
||||
//printf("Constructing http3contextdata: %p\n", this);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
@@ -77,7 +77,7 @@ namespace uWS {
|
||||
|
||||
/* If not already written */
|
||||
writeStatus("200 OK");
|
||||
|
||||
|
||||
// has body is determined by the ending so this is perfect here
|
||||
us_quic_socket_context_send_headers(nullptr, (us_quic_stream_t *) this, responseData->headerOffset, data.length() > 0);
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ namespace uWS {
|
||||
|
||||
/* Status is always first header just like for h1 */
|
||||
unsigned int headerOffset = 0;
|
||||
|
||||
|
||||
/* Write offset */
|
||||
uint64_t offset = 0;
|
||||
|
||||
|
||||
@@ -90,7 +90,7 @@ private:
|
||||
MACRO("UNLINK") \
|
||||
MACRO("UNLOCK") \
|
||||
MACRO("UNSUBSCRIBE") \
|
||||
|
||||
|
||||
|
||||
#ifndef _WIN32
|
||||
static constexpr std::array<const std::string, 35> HTTP_METHODS = {
|
||||
@@ -108,12 +108,12 @@ private:
|
||||
FOR_EACH_HTTP_METHOD(MACRO)
|
||||
#undef MACRO
|
||||
};
|
||||
|
||||
|
||||
static std::span<const std::string> getAllHttpMethods() {
|
||||
static std::once_flag flag;
|
||||
static std::array<std::string, 35> methods;
|
||||
std::call_once(flag, []() {
|
||||
methods = {
|
||||
#define MACRO(name) std::string {name},
|
||||
FOR_EACH_HTTP_METHOD(MACRO)
|
||||
#undef MACRO
|
||||
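getAllHttpMethods above uses std::call_once with a function-local std::once_flag so the std::string table is constructed exactly once, even if several threads hit the first call concurrently. A generic sketch of that initialization pattern follows; the table contents here are placeholders rather than the real 35-entry method list.

#include <array>
#include <mutex>
#include <span>
#include <string>

/* Sketch: lazily build a table of strings in a thread-safe way. The lambda
 * passed to std::call_once runs exactly once across all threads. */
std::span<const std::string> lazyMethodTable() {
    static std::once_flag flag;
    static std::array<std::string, 3> table;
    std::call_once(flag, []() {
        table = {std::string{"GET"}, std::string{"POST"}, std::string{"PUT"}};
    });
    return table;
}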
@@ -201,7 +201,7 @@ private:
|
||||
/* Call filter */
|
||||
HttpContextData<SSL> *httpContextData = getSocketContextDataS(s);
|
||||
|
||||
|
||||
|
||||
for (auto &f : httpContextData->filterHandlers) {
|
||||
f((HttpResponse<SSL> *) s, -1);
|
||||
}
|
||||
@@ -276,7 +276,7 @@ private:
|
||||
|
||||
/* Mark pending request and emit it */
|
||||
httpResponseData->state = HttpResponseData<SSL>::HTTP_RESPONSE_PENDING;
|
||||
|
||||
|
||||
|
||||
/* Mark this response as connectionClose if ancient or connection: close */
|
||||
if (httpRequest->isAncient() || httpRequest->getHeader("connection").length() == 5) {
|
||||
@@ -336,7 +336,7 @@ private:
|
||||
}, [httpResponseData](void *user, std::string_view data, bool fin) -> void * {
|
||||
/* We always get an empty chunk even if there is no data */
|
||||
if (httpResponseData->inStream) {
|
||||
|
||||
|
||||
/* Todo: can this handle timeout for non-post as well? */
|
||||
if (fin) {
|
||||
/* If we just got the last chunk (or empty chunk), disable timeout */
|
||||
@@ -374,7 +374,7 @@ private:
|
||||
});
|
||||
|
||||
auto httpErrorStatusCode = result.httpErrorStatusCode();
|
||||
|
||||
|
||||
/* Mark that we are no longer parsing Http */
|
||||
httpContextData->flags.isParsingHttp = false;
|
||||
/* If we got fullptr that means the parser wants us to close the socket from error (same as calling the errorHandler) */
|
||||
@@ -388,7 +388,7 @@ private:
|
||||
/* Close any socket on HTTP errors */
|
||||
us_socket_close(SSL, s, 0, nullptr);
|
||||
}
|
||||
|
||||
|
||||
auto returnedData = result.returnedData;
|
||||
/* We need to uncork in all cases, except for nullptr (closed socket, or upgraded socket) */
|
||||
if (returnedData != nullptr) {
|
||||
@@ -456,10 +456,9 @@ private:
|
||||
size_t bufferedAmount = asyncSocket->getBufferedAmount();
|
||||
if (bufferedAmount > 0) {
|
||||
/* Try to flush pending data from the socket's buffer to the network */
|
||||
bufferedAmount -= asyncSocket->flush();
|
||||
|
||||
asyncSocket->flush();
|
||||
/* Check if there's still data waiting to be sent after flush attempt */
|
||||
if (bufferedAmount > 0) {
|
||||
if (asyncSocket->getBufferedAmount() > 0) {
|
||||
/* Socket buffer is not completely empty yet
|
||||
* - Reset the timeout to prevent premature connection closure
|
||||
* - This allows time for another writable event or new request
|
||||
@@ -472,12 +471,12 @@ private:
|
||||
* and will fall through to the next section of code
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
/* Ask the developer to write data and return success (true) or failure (false), OR skip sending anything and return success (true). */
|
||||
if (httpResponseData->onWritable) {
|
||||
/* We are now writable, so hang timeout again, the user does not have to do anything so we should hang until end or tryEnd rearms timeout */
|
||||
us_socket_timeout(SSL, s, 0);
|
||||
|
||||
|
||||
/* We expect the developer to return whether or not write was successful (true).
|
||||
* If write was never called, the developer should still return true so that we may drain. */
|
||||
bool success = httpResponseData->callOnWritable(reinterpret_cast<HttpResponse<SSL> *>(asyncSocket), httpResponseData->offset);
|
||||
@@ -498,6 +497,7 @@ private:
|
||||
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) {
|
||||
if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) == 0) {
|
||||
if (asyncSocket->getBufferedAmount() == 0) {
|
||||
|
||||
asyncSocket->shutdown();
|
||||
/* We need to force close after sending FIN since we want to hinder
|
||||
* clients from keeping to send their huge data */
|
||||
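On a writable event the code above first flushes buffered data, rearms the timeout, and then calls the user's onWritable callback, whose contract (per the comments) is to return true whenever the response can keep making progress, even if nothing was written on this call. A schematic handler honoring that contract is sketched below; the callback shape is illustrative and not the exact uWS signature.

#include <cstdint>
#include <functional>
#include <string_view>

/* Sketch: resume sending `body` from `offset` when the socket becomes
 * writable. Returning true tells the server the response is still healthy,
 * even when nothing could be written; the event loop will invoke the
 * handler again on the next writable event. */
bool onWritableExample(std::uint64_t offset, std::string_view body,
                       const std::function<bool(std::string_view)> &tryWrite) {
    if (offset < body.size()) {
        /* A refused or partial write here is not an error, just backpressure. */
        tryWrite(body.substr(static_cast<size_t>(offset)));
    }
    return true;
}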
@@ -588,7 +588,7 @@ public:
|
||||
methods = getAllHttpMethods();
|
||||
} else {
|
||||
methods_buffer[0] = std::string(method);
|
||||
methods = {methods_buffer.data(), 1};
|
||||
}
|
||||
|
||||
uint32_t priority = method == "*" ? httpContextData->currentRouter->LOW_PRIORITY : (upgrade ? httpContextData->currentRouter->HIGH_PRIORITY : httpContextData->currentRouter->MEDIUM_PRIORITY);
|
||||
@@ -616,7 +616,7 @@ public:
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets), httpContextData](auto *r) mutable {
|
||||
auto user = r->getUserData();
|
||||
@@ -667,5 +667,3 @@ public:
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -39,4 +39,3 @@ static const std::string_view httpErrorResponses[] = {
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -108,7 +108,7 @@ namespace uWS
|
||||
}
|
||||
|
||||
|
||||
/* Returns true if there was an error */
|
||||
bool isError() {
|
||||
return parserError != HTTP_PARSER_ERROR_NONE;
|
||||
}
|
||||
@@ -403,7 +403,7 @@ namespace uWS
|
||||
|
||||
static bool isValidMethod(std::string_view str, bool useStrictMethodValidation) {
|
||||
if (str.empty()) return false;
|
||||
|
||||
|
||||
if (useStrictMethodValidation) {
|
||||
return Bun__HTTPMethod__from(str.data(), str.length()) != -1;
|
||||
}
|
||||
@@ -613,22 +613,25 @@ namespace uWS
|
||||
return HttpParserResult::shortRead();
|
||||
}
|
||||
postPaddedBuffer = requestLineResult.position;
|
||||
|
||||
|
||||
if(requestLineResult.isAncientHTTP) {
|
||||
isAncientHTTP = true;
|
||||
}
|
||||
/* No request headers found */
|
||||
size_t buffer_size = end - postPaddedBuffer;
|
||||
const char * headerStart = (headers[0].key.length() > 0) ? headers[0].key.data() : end;
|
||||
|
||||
if(buffer_size < 2) {
|
||||
/* Fragmented request */
|
||||
return HttpParserResult::error(HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_REQUEST);
|
||||
|
||||
/* Check if we can see if headers follow or not */
|
||||
if (postPaddedBuffer + 2 > end) {
|
||||
/* Not enough data to check for \r\n */
|
||||
return HttpParserResult::shortRead();
|
||||
}
|
||||
if(buffer_size >= 2 && postPaddedBuffer[0] == '\r' && postPaddedBuffer[1] == '\n') {
|
||||
/* No headers found */
|
||||
|
||||
/* Check for empty headers (no headers, just \r\n) */
|
||||
if (postPaddedBuffer[0] == '\r' && postPaddedBuffer[1] == '\n') {
|
||||
/* Valid request with no headers */
|
||||
return HttpParserResult::success((unsigned int) ((postPaddedBuffer + 2) - start));
|
||||
}
|
||||
|
||||
headers++;
|
||||
|
||||
for (unsigned int i = 1; i < UWS_HTTP_MAX_HEADERS_COUNT - 1; i++) {
|
||||
@@ -708,7 +711,7 @@ namespace uWS
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
|
||||
if(postPaddedBuffer[0] == '\r') {
|
||||
// invalid char after \r
|
||||
return HttpParserResult::error(HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_REQUEST);
|
||||
@@ -754,7 +757,7 @@ namespace uWS
|
||||
|
||||
/* Add all headers to bloom filter */
|
||||
req->bf.reset();
|
||||
|
||||
|
||||
for (HttpRequest::Header *h = req->headers; (++h)->key.length(); ) {
|
||||
req->bf.add(h->key);
|
||||
}
|
||||
@@ -861,7 +864,7 @@ namespace uWS
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return HttpParserResult::success(consumedTotal, user);
|
||||
}
|
||||
|
||||
@@ -997,4 +1000,3 @@ public:
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -106,13 +106,13 @@ public:
|
||||
if (closeConnection) {
|
||||
/* We can only write the header once */
|
||||
if (!(httpResponseData->state & (HttpResponseData<SSL>::HTTP_END_CALLED))) {
|
||||
|
||||
|
||||
/* HTTP 1.1 must send this back unless the client already sent it to us.
|
||||
* It is a connection close when either of the two parties say so but the
|
||||
* one party must tell the other one so.
|
||||
*
|
||||
* This check also serves to limit writing the header only once. */
|
||||
if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) == 0) {
|
||||
if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) == 0 && !(httpResponseData->state & (HttpResponseData<SSL>::HTTP_WRITE_CALLED))) {
|
||||
writeHeader("Connection", "close");
|
||||
}
|
||||
|
||||
@@ -125,14 +125,13 @@ public:
|
||||
|
||||
/* We do not have tryWrite-like functionalities, so ignore optional in this path */
|
||||
|
||||
|
||||
|
||||
/* Write the chunked data if there is any (this will not send zero chunks) */
|
||||
this->write(data, nullptr);
|
||||
|
||||
|
||||
|
||||
/* Terminating 0 chunk */
|
||||
Super::write("0\r\n\r\n", 5);
|
||||
|
||||
httpResponseData->markDone();
|
||||
|
||||
/* We need to check if we should close this socket here now */
|
||||
@@ -471,7 +470,7 @@ public:
|
||||
writeStatus(HTTP_200_OK);
|
||||
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
|
||||
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER) && !httpResponseData->fromAncientRequest) {
|
||||
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
|
||||
/* Write mark on first call to write */
|
||||
@@ -533,7 +532,7 @@ public:
|
||||
}
|
||||
return !has_failed;
|
||||
}
|
||||
|
||||
|
||||
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
@@ -546,7 +545,7 @@ public:
|
||||
Super::write("\r\n", 2);
|
||||
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
|
||||
}
|
||||
|
||||
|
||||
writeUnsignedHex((unsigned int) data.length());
|
||||
Super::write("\r\n", 2);
|
||||
} else if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
|
||||
@@ -579,14 +578,13 @@ public:
|
||||
// Write End of Chunked Encoding after data has been written
|
||||
Super::write("\r\n", 2);
|
||||
}
|
||||
|
||||
|
||||
/* Reset timeout on each sent chunk */
|
||||
this->resetTimeout();
|
||||
|
||||
if (writtenPtr) {
|
||||
*writtenPtr = total_written;
|
||||
}
|
||||
|
||||
/* If we did not fail the write, accept more */
|
||||
return !has_failed;
|
||||
}
|
||||
|
||||
@@ -109,5 +109,3 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -87,7 +87,7 @@ private:
|
||||
LoopData *loopData = (LoopData *) us_loop_ext((struct us_loop_t *) loop);
|
||||
loopData->dateTimer = us_create_timer((struct us_loop_t *) loop, 1, sizeof(LoopData *));
|
||||
loopData->updateDate();
|
||||
|
||||
|
||||
memcpy(us_timer_ext(loopData->dateTimer), &loopData, sizeof(LoopData *));
|
||||
us_timer_set(loopData->dateTimer, [](struct us_timer_t *t) {
|
||||
LoopData *loopData;
|
||||
@@ -103,7 +103,7 @@ private:
|
||||
~LoopCleaner() {
|
||||
// There's no need to call this destructor if Bun is in the process of exiting.
|
||||
// This is both a performance thing, and also to prevent freeing some things which are not meant to be freed
|
||||
// such as uv_tty_t
|
||||
if(loop && cleanMe && !bun_is_exiting()) {
|
||||
cleanMe = false;
|
||||
loop->free();
|
||||
|
||||
@@ -97,11 +97,11 @@ public:
|
||||
this->corkedSocket = nullptr;
|
||||
this->corkOffset = 0;
|
||||
}
|
||||
|
||||
|
||||
unsigned int getCorkOffset() {
|
||||
return this->corkOffset;
|
||||
}
|
||||
|
||||
|
||||
void setCorkOffset(unsigned int offset) {
|
||||
this->corkOffset = offset;
|
||||
}
|
||||
@@ -109,7 +109,7 @@ public:
|
||||
void incrementCorkedOffset(unsigned int offset) {
|
||||
this->corkOffset += offset;
|
||||
}
|
||||
|
||||
|
||||
char* getCorkBuffer() {
|
||||
return this->corkBuffer;
|
||||
}
|
||||
@@ -118,7 +118,6 @@ public:
|
||||
time_t now = time(0);
|
||||
struct tm tstruct = {};
|
||||
#ifdef _WIN32
|
||||
/* Micro, fucking soft never follows spec. */
|
||||
gmtime_s(&tstruct, &now);
|
||||
#else
|
||||
gmtime_r(&now, &tstruct);
|
||||
|
||||
@@ -260,7 +260,7 @@ public:
|
||||
/* This one always resets needsDrainage before it calls any cb's.
|
||||
* Otherwise we would stackoverflow when sending after publish but before drain. */
|
||||
drainImpl(s);
|
||||
|
||||
|
||||
/* If we drained last subscriber, also clear outgoingMessages */
|
||||
if (!drainableSubscribers) {
|
||||
outgoingMessages.clear();
|
||||
@@ -363,5 +363,3 @@ public:
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
63  scripts/gamble.ts  (Executable file)
@@ -0,0 +1,63 @@
#!/usr/bin/env bun
// usage: bun scripts/gamble.ts <number of attempts> <timeout in seconds> <command>

import assert from "node:assert";

const attempts = parseInt(process.argv[2]);
const timeout = parseFloat(process.argv[3]);
const argv = process.argv.slice(4);

let numTimedOut = 0;
const signals = new Map<string, number>();
const codes = new Map<number, number>();
let numOk = 0;

for (let i = 0; i < attempts; i++) {
  const proc = Bun.spawn({
    cmd: argv,
    timeout: 1000 * timeout,
    stdin: null,
    stdout: "ignore",
    stderr: "pipe",
  });
  await proc.exited;
  const errors = await new Response(proc.stderr).text();

  const { signalCode: signal, exitCode } = proc;

  if (signal === "SIGTERM") {
    // sent for timeouts
    numTimedOut += 1;
  } else if (signal) {
    const newCount = 1 + (signals.get(signal) ?? 0);
    signals.set(signal, newCount);
  } else if (exitCode !== 0) {
    // if null there should have been a signal
    assert(exitCode !== null);
    const newCount = 1 + (codes.get(exitCode) ?? 0);
    codes.set(exitCode, newCount);
  } else {
    numOk += 1;
  }
  if (exitCode !== 0) console.log(errors);
  process.stdout.write(exitCode === 0 ? "." : "!");
}
process.stdout.write("\n");

const width = attempts.toString().length;
const pad = (num: number): string => num.toString().padStart(width, " ");
const green = (text: string) => console.log(`\x1b[32m${text}\x1b[0m`);
const red = (text: string) => console.log(`\x1b[31m${text}\x1b[0m`);

green(`${pad(numOk)}/${attempts} OK`);
if (numTimedOut > 0) {
  red(`${pad(numTimedOut)}/${attempts} timeout`);
}
for (const [signal, count] of signals.entries()) {
  red(`${pad(count)}/${attempts} ${signal}`);
}
for (const [code, count] of codes.entries()) {
  red(`${pad(count)}/${attempts} code ${code}`);
}

process.exit(numOk === attempts ? 0 : 1);
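For instance, a hypothetical invocation such as "bun scripts/gamble.ts 200 60 bun test ./flaky.test.ts" would run the command 200 times with a 60-second timeout per attempt, print a "." or "!" for each run, report separate counts for timeouts (which arrive as SIGTERM), other signals, and non-zero exit codes, and exit non-zero unless every attempt succeeded.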
@@ -230,6 +230,27 @@ function getTestExpectations() {
  return expectations;
}

/**
 * Returns whether exception checks should be validated when running the given test
 * @param {string} test
 * @returns {boolean}
 */
const shouldValidateExceptions = (() => {
  let skipArray;
  return test => {
    if (!skipArray) {
      const path = join(cwd, "test/no-validate-exceptions.txt");
      if (!existsSync(path)) {
        skipArray = [];
      } else {
        skipArray = readFileSync(path, "utf-8")
          .split("\n")
          .filter(line => !line.startsWith("#"));
      }
    }
    return !(skipArray.includes(test) || skipArray.includes("test/" + test));
  };
})();

/**
 * @param {string} testPath
 * @returns {string[]}
@@ -416,16 +437,20 @@ async function runTests() {
|
||||
const runWithBunTest =
|
||||
title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
|
||||
const subcommand = runWithBunTest ? "test" : "run";
|
||||
const env = {
|
||||
FORCE_COLOR: "0",
|
||||
NO_COLOR: "1",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
};
|
||||
if (basename(execPath).includes("asan") && shouldValidateExceptions(testPath)) {
|
||||
env.BUN_JSC_validateExceptionChecks = "1";
|
||||
}
|
||||
await runTest(title, async () => {
|
||||
const { ok, error, stdout } = await spawnBun(execPath, {
|
||||
cwd: cwd,
|
||||
args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
|
||||
timeout: getNodeParallelTestTimeout(title),
|
||||
env: {
|
||||
FORCE_COLOR: "0",
|
||||
NO_COLOR: "1",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
},
|
||||
env,
|
||||
stdout: chunk => pipeTestStdout(process.stdout, chunk),
|
||||
stderr: chunk => pipeTestStdout(process.stderr, chunk),
|
||||
});
|
||||
@@ -953,13 +978,18 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
|
||||
|
||||
testArgs.push(absPath);
|
||||
|
||||
const env = {
|
||||
GITHUB_ACTIONS: "true", // always true so annotations are parsed
|
||||
};
|
||||
if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
|
||||
env.BUN_JSC_validateExceptionChecks = "1";
|
||||
}
|
||||
|
||||
const { ok, error, stdout } = await spawnBun(execPath, {
|
||||
args: isReallyTest ? testArgs : [...args, absPath],
|
||||
cwd: options["cwd"],
|
||||
timeout: isReallyTest ? timeout : 30_000,
|
||||
env: {
|
||||
GITHUB_ACTIONS: "true", // always true so annotations are parsed
|
||||
},
|
||||
env,
|
||||
stdout: chunk => pipeTestStdout(process.stdout, chunk),
|
||||
stderr: chunk => pipeTestStdout(process.stderr, chunk),
|
||||
});
|
||||
|
||||
@@ -219,6 +219,8 @@ comptime {
|
||||
}
|
||||
|
||||
pub export fn Bun__onExit() void {
|
||||
bun.JSC.Node.FSEvents.closeAndWait();
|
||||
|
||||
runExitCallbacks();
|
||||
Output.flush();
|
||||
std.mem.doNotOptimizeAway(&Bun__atexit);
|
||||
|
||||
@@ -21,6 +21,7 @@ side: ?bun.bake.Side,
|
||||
/// entrypoint like sourcemaps and bytecode
|
||||
entry_point_index: ?u32,
|
||||
referenced_css_files: []const Index = &.{},
|
||||
source_index: Index.Optional = .none,
|
||||
|
||||
pub const Index = bun.GenericIndex(u32, OutputFile);
|
||||
|
||||
@@ -62,11 +63,19 @@ pub const FileOperation = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub const Kind = @typeInfo(Value).Union.tag_type.?;
|
||||
pub const Kind = enum {
|
||||
move,
|
||||
copy,
|
||||
noop,
|
||||
buffer,
|
||||
pending,
|
||||
saved,
|
||||
};
|
||||
|
||||
// TODO: document how and why all variants of this union(enum) are used,
|
||||
// specifically .move and .copy; the new bundler has to load files in memory
|
||||
// in order to hash them, so i think it uses .buffer for those
|
||||
pub const Value = union(enum) {
|
||||
pub const Value = union(Kind) {
|
||||
move: FileOperation,
|
||||
copy: FileOperation,
|
||||
noop: u0,
|
||||
@@ -177,6 +186,7 @@ pub const Options = struct {
|
||||
source_map_index: ?u32 = null,
|
||||
bytecode_index: ?u32 = null,
|
||||
output_path: string,
|
||||
source_index: Index.Optional = .none,
|
||||
size: ?usize = null,
|
||||
input_path: []const u8 = "",
|
||||
display_size: u32 = 0,
|
||||
@@ -205,6 +215,7 @@ pub fn init(options: Options) OutputFile {
|
||||
.input_loader = options.input_loader,
|
||||
.src_path = Fs.Path.init(options.input_path),
|
||||
.dest_path = options.output_path,
|
||||
.source_index = options.source_index,
|
||||
.size = options.size orelse switch (options.data) {
|
||||
.buffer => |buf| buf.data.len,
|
||||
.file => |file| file.size,
|
||||
@@ -310,7 +321,7 @@ pub fn toJS(
|
||||
) bun.JSC.JSValue {
|
||||
return switch (this.value) {
|
||||
.move, .pending => @panic("Unexpected pending output file"),
|
||||
.noop => JSC.JSValue.undefined,
|
||||
.noop => .js_undefined,
|
||||
.copy => |copy| brk: {
|
||||
const file_blob = JSC.WebCore.Blob.Store.initFile(
|
||||
if (copy.fd.isValid())
|
||||
|
||||
@@ -46,7 +46,7 @@ pub const HashType = u32;
|
||||
const no_watch_item: WatchItemIndex = std.math.maxInt(WatchItemIndex);
|
||||
|
||||
/// Initializes a watcher. Each watcher is tied to some context type, which
|
||||
/// recieves watch callbacks on the watcher thread. This function does not
|
||||
/// receives watch callbacks on the watcher thread. This function does not
|
||||
/// actually start the watcher thread.
|
||||
///
|
||||
/// const watcher = try Watcher.init(T, instance_of_t, fs, bun.default_allocator)
|
||||
|
||||
@@ -1612,6 +1612,23 @@ pub const Api = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub const UnhandledRejections = enum(u8) {
|
||||
strict = 0,
|
||||
throw = 1,
|
||||
warn = 2,
|
||||
none = 3,
|
||||
warn_with_error_code = 4,
|
||||
bun = 5,
|
||||
|
||||
pub const map = bun.ComptimeStringMap(UnhandledRejections, .{
|
||||
.{ "strict", .strict },
|
||||
.{ "throw", .throw },
|
||||
.{ "warn", .warn },
|
||||
.{ "none", .none },
|
||||
.{ "warn-with-error-code", .warn_with_error_code },
|
||||
});
|
||||
};
|
||||
|
||||
pub const TransformOptions = struct {
|
||||
/// jsx
|
||||
jsx: ?Jsx = null,
|
||||
@@ -1709,6 +1726,8 @@ pub const Api = struct {
|
||||
|
||||
// from --no-addons. null == true
|
||||
allow_addons: ?bool = null,
|
||||
/// from --unhandled-rejections, default is 'bun'
|
||||
unhandled_rejections: ?UnhandledRejections = null,
|
||||
|
||||
bunfig_path: []const u8,
|
||||
|
||||
@@ -3002,6 +3021,8 @@ pub const Api = struct {
|
||||
|
||||
ignore_scripts: ?bool = null,
|
||||
|
||||
link_workspace_packages: ?bool = null,
|
||||
|
||||
pub fn decode(reader: anytype) anyerror!BunInstall {
|
||||
var this = std.mem.zeroes(BunInstall);
|
||||
|
||||
|
||||
@@ -1132,7 +1132,7 @@ pub const LinuxWaker = struct {
|
||||
|
||||
pub const KEventWaker = struct {
|
||||
kq: std.posix.fd_t,
|
||||
machport: *anyopaque = undefined,
|
||||
machport: bun.mach_port = undefined,
|
||||
machport_buf: []u8 = &.{},
|
||||
has_pending_wake: bool = false,
|
||||
|
||||
@@ -1155,6 +1155,10 @@ pub const KEventWaker = struct {
|
||||
}
|
||||
|
||||
pub fn wait(this: Waker) void {
|
||||
if (!bun.FD.fromNative(this.kq).isValid()) {
|
||||
return;
|
||||
}
|
||||
|
||||
bun.JSC.markBinding(@src());
|
||||
var events = zeroed;
|
||||
|
||||
@@ -1169,14 +1173,15 @@ pub const KEventWaker = struct {
|
||||
);
|
||||
}
|
||||
|
||||
extern fn io_darwin_close_machport(bun.mach_port) void;
|
||||
|
||||
extern fn io_darwin_create_machport(
|
||||
*anyopaque,
|
||||
std.posix.fd_t,
|
||||
*anyopaque,
|
||||
usize,
|
||||
) ?*anyopaque;
|
||||
) bun.mach_port;
|
||||
|
||||
extern fn io_darwin_schedule_wakeup(*anyopaque) bool;
|
||||
extern fn io_darwin_schedule_wakeup(bun.mach_port) bool;
|
||||
|
||||
pub fn init() !Waker {
|
||||
return initWithFileDescriptor(bun.default_allocator, try std.posix.kqueue());
|
||||
@@ -1187,11 +1192,13 @@ pub const KEventWaker = struct {
|
||||
bun.assert(kq > -1);
|
||||
const machport_buf = try allocator.alloc(u8, 1024);
|
||||
const machport = io_darwin_create_machport(
|
||||
machport_buf.ptr,
|
||||
kq,
|
||||
machport_buf.ptr,
|
||||
1024,
|
||||
) orelse return error.MachportCreationFailed;
|
||||
);
|
||||
if (machport == 0) {
|
||||
return error.MachportCreationFailed;
|
||||
}
|
||||
|
||||
return Waker{
|
||||
.kq = kq,
|
||||
|
||||
@@ -112,7 +112,7 @@ watcher_atomics: WatcherAtomics,
|
||||
/// and bundling times, where the test harness (bake-harness.ts) would not wait
|
||||
/// long enough for processing to complete. Checking client logs, for example,
|
||||
/// not only must wait on DevServer, but also wait on all connected WebSocket
|
||||
/// clients to recieve their update, but also wait for those modules
|
||||
/// clients to receive their update, but also wait for those modules
|
||||
/// (potentially async) to finish loading.
|
||||
///
|
||||
/// To solve the first part of this, DevServer exposes a special WebSocket
|
||||
@@ -133,7 +133,7 @@ testing_batch_events: union(enum) {
|
||||
enable_after_bundle,
|
||||
/// DevServer will not start new bundles, but instead write all files into
|
||||
/// this `TestingBatch` object. Additionally, writes into this will signal
|
||||
/// a message saying that new files have been seen. Once DevServer recieves
|
||||
/// a message saying that new files have been seen. Once DevServer receives
|
||||
/// that signal, or times out, it will "release" this batch.
|
||||
enabled: TestingBatch,
|
||||
},
|
||||
@@ -760,6 +760,9 @@ pub fn deinit(dev: *DevServer) void {
|
||||
.html_routes_hard_affected = dev.incremental_result.html_routes_hard_affected.deinit(allocator),
|
||||
}),
|
||||
.has_tailwind_plugin_hack = if (dev.has_tailwind_plugin_hack) |*hack| {
|
||||
for (hack.keys()) |key| {
|
||||
allocator.free(key);
|
||||
}
|
||||
hack.deinit(allocator);
|
||||
},
|
||||
.directory_watchers = {
|
||||
@@ -1140,20 +1143,20 @@ pub fn setRoutes(dev: *DevServer, server: anytype) !bool {
|
||||
}
|
||||
}
|
||||
|
||||
fn onNotFound(_: *DevServer, _: *Request, resp: anytype) void {
|
||||
fn onNotFound(_: *DevServer, _: *Request, resp: AnyResponse) void {
|
||||
notFound(resp);
|
||||
}
|
||||
|
||||
fn notFound(resp: anytype) void {
|
||||
fn notFound(resp: AnyResponse) void {
|
||||
resp.corked(onNotFoundCorked, .{resp});
|
||||
}
|
||||
|
||||
fn onNotFoundCorked(resp: anytype) void {
|
||||
fn onNotFoundCorked(resp: AnyResponse) void {
|
||||
resp.writeStatus("404 Not Found");
|
||||
resp.end("Not Found", false);
|
||||
}
|
||||
|
||||
fn onOutdatedJSCorked(resp: anytype) void {
|
||||
fn onOutdatedJSCorked(resp: AnyResponse) void {
|
||||
// Send a payload to instantly reload the page. This only happens when the
|
||||
// client bundle is invalidated while the page is loading, aka when you
|
||||
// perform many file updates that cannot be hot-updated.
|
||||
@@ -1273,11 +1276,11 @@ inline fn redirectHandler(comptime path: []const u8, comptime is_ssl: bool) fn (
|
||||
}.handle;
|
||||
}
|
||||
|
||||
fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: anytype) void {
|
||||
fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: AnyResponse) void {
|
||||
resp.corked(onIncrementalVisualizerCorked, .{resp});
|
||||
}
|
||||
|
||||
fn onIncrementalVisualizerCorked(resp: anytype) void {
|
||||
fn onIncrementalVisualizerCorked(resp: AnyResponse) void {
|
||||
const code = if (Environment.codegen_embed)
|
||||
@embedFile("incremental_visualizer.html")
|
||||
else
|
||||
@@ -1285,11 +1288,11 @@ fn onIncrementalVisualizerCorked(resp: anytype) void {
|
||||
resp.end(code, false);
|
||||
}
|
||||
|
||||
fn onMemoryVisualizer(_: *DevServer, _: *Request, resp: anytype) void {
|
||||
fn onMemoryVisualizer(_: *DevServer, _: *Request, resp: AnyResponse) void {
|
||||
resp.corked(onMemoryVisualizerCorked, .{resp});
|
||||
}
|
||||
|
||||
fn onMemoryVisualizerCorked(resp: anytype) void {
|
||||
fn onMemoryVisualizerCorked(resp: AnyResponse) void {
|
||||
const code = if (Environment.codegen_embed)
|
||||
@embedFile("memory_visualizer.html")
|
||||
else
|
||||
@@ -1303,7 +1306,7 @@ fn ensureRouteIsBundled(
|
||||
kind: DeferredRequest.Handler.Kind,
|
||||
req: *Request,
|
||||
resp: AnyResponse,
|
||||
) bun.OOM!void {
|
||||
) bun.JSError!void {
|
||||
assert(dev.magic == .valid);
|
||||
assert(dev.server != null);
|
||||
sw: switch (dev.routeBundlePtr(route_bundle_index).server_state) {
|
||||
@@ -1416,7 +1419,7 @@ fn ensureRouteIsBundled(
|
||||
);
|
||||
},
|
||||
.loaded => switch (kind) {
|
||||
.server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp),
|
||||
.server_handler => try dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp),
|
||||
.bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp, bun.http.Method.which(req.method()) orelse .POST),
|
||||
},
|
||||
}
|
||||
@@ -1525,7 +1528,7 @@ fn onFrameworkRequestWithBundle(
|
||||
route_bundle_index: RouteBundle.Index,
|
||||
req: bun.JSC.API.SavedRequest.Union,
|
||||
resp: AnyResponse,
|
||||
) void {
|
||||
) bun.JSError!void {
|
||||
const route_bundle = dev.routeBundlePtr(route_bundle_index);
|
||||
assert(route_bundle.data == .framework);
|
||||
const bundle = &route_bundle.data.framework;
|
||||
@@ -1559,7 +1562,7 @@ fn onFrameworkRequestWithBundle(
|
||||
if (route.file_layout != .none) n += 1;
|
||||
route = dev.router.routePtr(route.parent.unwrap() orelse break);
|
||||
}
|
||||
const arr = JSValue.createEmptyArray(global, n);
|
||||
const arr = try JSValue.createEmptyArray(global, n);
|
||||
route = dev.router.routePtr(bundle.route_index);
|
||||
var route_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()]));
|
||||
arr.putIndex(global, 0, route_name.transferToJS(global));
|
||||
@@ -2123,7 +2126,7 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u
|
||||
return client_bundle;
|
||||
}
|
||||
|
||||
fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.JSValue {
|
||||
fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.JSError!JSC.JSValue {
|
||||
assert(route_bundle.data == .framework); // a JSC.JSValue has no purpose, and therefore isn't implemented.
|
||||
if (Environment.allow_assert) assert(!route_bundle.data.framework.cached_css_file_array.has());
|
||||
assert(route_bundle.server_state == .loaded); // page is unfit to load
|
||||
@@ -2143,7 +2146,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.J
|
||||
try dev.traceAllRouteImports(route_bundle, >s, .find_css);
|
||||
|
||||
const names = dev.client_graph.current_css_files.items;
|
||||
const arr = JSC.JSArray.createEmpty(dev.vm.global, names.len);
|
||||
const arr = try JSC.JSArray.createEmpty(dev.vm.global, names.len);
|
||||
for (names, 0..) |item, i| {
|
||||
var buf: [asset_prefix.len + @sizeOf(u64) * 2 + "/.css".len]u8 = undefined;
|
||||
const path = std.fmt.bufPrint(&buf, asset_prefix ++ "/{s}.css", .{
|
||||
@@ -2187,9 +2190,9 @@ fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, gts: *Graph
|
||||
}
|
||||
}
|
||||
|
||||
fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObject, items: []const IncrementalGraph(.server).FileIndex) JSValue {
|
||||
fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObject, items: []const IncrementalGraph(.server).FileIndex) bun.JSError!JSValue {
|
||||
if (items.len == 0) return .null;
|
||||
const arr = JSC.JSArray.createEmpty(global, items.len);
|
||||
const arr = try JSC.JSArray.createEmpty(global, items.len);
|
||||
const names = dev.server_graph.bundled_files.keys();
|
||||
for (items, 0..) |item, i| {
|
||||
const str = bun.String.createUTF8(dev.relativePath(names[item.get()]));
|
||||
@@ -2248,7 +2251,7 @@ pub fn finalizeBundle(
|
||||
dev: *DevServer,
|
||||
bv2: *bun.bundle_v2.BundleV2,
|
||||
result: *const bun.bundle_v2.DevServerOutput,
|
||||
) bun.OOM!void {
|
||||
) bun.JSError!void {
|
||||
assert(dev.magic == .valid);
|
||||
var had_sent_hmr_event = false;
|
||||
defer {
|
||||
@@ -2411,9 +2414,14 @@ pub fn finalizeBundle(
|
||||
if (dev.has_tailwind_plugin_hack) |*map| {
|
||||
const first_1024 = code.buffer[0..@min(code.buffer.len, 1024)];
|
||||
if (std.mem.indexOf(u8, first_1024, "tailwind") != null) {
|
||||
try map.put(dev.allocator, key, {});
|
||||
const entry = try map.getOrPut(dev.allocator, key);
|
||||
if (!entry.found_existing) {
|
||||
entry.key_ptr.* = try dev.allocator.dupe(u8, key);
|
||||
}
|
||||
} else {
|
||||
_ = map.swapRemove(key);
|
||||
if (map.fetchSwapRemove(key)) |entry| {
|
||||
dev.allocator.free(entry.key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2539,8 +2547,8 @@ pub fn finalizeBundle(
|
||||
dev.vm.global.toJSValue(),
|
||||
&.{
|
||||
server_modules,
|
||||
dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_added.items),
|
||||
dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_removed.items),
|
||||
try dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_added.items),
|
||||
try dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_removed.items),
|
||||
},
|
||||
) catch |err| {
|
||||
// One module replacement error should NOT prevent follow-up
|
||||
@@ -2902,7 +2910,7 @@ pub fn finalizeBundle(
|
||||
|
||||
switch (req.handler) {
|
||||
.aborted => continue,
|
||||
.server_handler => |saved| dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response),
|
||||
.server_handler => |saved| try dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response),
|
||||
.bundled_html_page => |ram| dev.onHtmlRequestWithBundle(req.route_bundle_index, ram.response, ram.method),
|
||||
}
|
||||
}
|
||||
@@ -4850,11 +4858,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
|
||||
// Additionally, clear the cached entry of the file from the path to
|
||||
// source index map.
|
||||
const hash = bun.hash(abs_path);
|
||||
for ([_]*bun.bundle_v2.PathToSourceIndexMap{
|
||||
&bv2.graph.path_to_source_index_map,
|
||||
&bv2.graph.client_path_to_source_index_map,
|
||||
&bv2.graph.ssr_path_to_source_index_map,
|
||||
}) |map| {
|
||||
for (&bv2.graph.build_graphs.values) |*map| {
|
||||
_ = map.remove(hash);
|
||||
}
|
||||
}
|
||||
@@ -6043,7 +6047,7 @@ pub fn onWebSocketUpgrade(
|
||||
dev: *DevServer,
|
||||
res: anytype,
|
||||
req: *Request,
|
||||
upgrade_ctx: *uws.uws_socket_context_t,
|
||||
upgrade_ctx: *uws.SocketContext,
|
||||
id: usize,
|
||||
) void {
|
||||
assert(id == 0);
|
||||
@@ -8496,7 +8500,6 @@ const BundleV2 = bun.bundle_v2.BundleV2;
|
||||
const Chunk = bun.bundle_v2.Chunk;
|
||||
const ContentHasher = bun.bundle_v2.ContentHasher;
|
||||
|
||||
|
||||
const uws = bun.uws;
|
||||
const AnyWebSocket = uws.AnyWebSocket;
|
||||
const Request = uws.Request;
|
||||
|
||||
@@ -1094,14 +1094,14 @@ pub const JSFrameworkRouter = struct {
|
||||
|
||||
const validators = bun.JSC.Node.validators;
|
||||
|
||||
pub fn getBindings(global: *JSC.JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSObject.create(.{
|
||||
pub fn getBindings(global: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
|
||||
return (try JSC.JSObject.create(.{
|
||||
.parseRoutePattern = global.createHostFunction("parseRoutePattern", parseRoutePattern, 1),
|
||||
.FrameworkRouter = js.getConstructor(global),
|
||||
}, global).toJS();
|
||||
}, global)).toJS();
|
||||
}
|
||||
|
||||
pub fn constructor(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) !*JSFrameworkRouter {
|
||||
pub fn constructor(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*JSFrameworkRouter {
|
||||
const opts = callframe.argumentsAsArray(1)[0];
|
||||
if (!opts.isObject())
|
||||
return global.throwInvalidArguments("FrameworkRouter needs an object as it's first argument", .{});
|
||||
@@ -1110,7 +1110,7 @@ pub const JSFrameworkRouter = struct {
|
||||
return global.throwInvalidArguments("Missing options.root", .{});
|
||||
defer root.deinit();
|
||||
|
||||
var style = try Style.fromJS(try opts.getOptional(global, "style", JSValue) orelse .undefined, global);
|
||||
var style = try Style.fromJS(try opts.getOptional(global, "style", JSValue) orelse .js_undefined, global);
|
||||
errdefer style.deinit();
|
||||
|
||||
const abs_root = try bun.default_allocator.dupe(u8, bun.strings.withoutTrailingSlash(
|
||||
@@ -1144,7 +1144,7 @@ pub const JSFrameworkRouter = struct {
|
||||
InsertionContext.wrap(JSFrameworkRouter, jsfr),
|
||||
);
|
||||
if (jsfr.stored_parse_errors.items.len > 0) {
|
||||
const arr = JSValue.createEmptyArray(global, jsfr.stored_parse_errors.items.len);
|
||||
const arr = try JSValue.createEmptyArray(global, jsfr.stored_parse_errors.items.len);
|
||||
for (jsfr.stored_parse_errors.items, 0..) |*item, i| {
|
||||
arr.putIndex(
|
||||
global,
|
||||
@@ -1176,7 +1176,7 @@ pub const JSFrameworkRouter = struct {
|
||||
var sfb = std.heap.stackFallback(4096, bun.default_allocator);
|
||||
const alloc = sfb.get();
|
||||
|
||||
return JSC.JSObject.create(.{
|
||||
return (try JSC.JSObject.create(.{
|
||||
.params = if (params_out.params.len > 0) params: {
|
||||
const obj = JSValue.createEmptyObject(global, params_out.params.len);
|
||||
for (params_out.params.slice()) |param| {
|
||||
@@ -1187,7 +1187,7 @@ pub const JSFrameworkRouter = struct {
|
||||
break :params obj;
|
||||
} else .null,
|
||||
.route = try jsfr.routeToJsonInverse(global, index, alloc),
|
||||
}, global).toJS();
|
||||
}, global)).toJS();
|
||||
}
|
||||
|
||||
return .null;
|
||||
@@ -1204,7 +1204,7 @@ pub const JSFrameworkRouter = struct {
|
||||
|
||||
fn routeToJson(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, route_index: Route.Index, allocator: Allocator) !JSValue {
|
||||
const route = jsfr.router.routePtr(route_index);
|
||||
return JSC.JSObject.create(.{
|
||||
return (try JSC.JSObject.create(.{
|
||||
.part = try partToJS(global, route.part, allocator),
|
||||
.page = jsfr.fileIdToJS(global, route.file_page),
|
||||
.layout = jsfr.fileIdToJS(global, route.file_layout),
|
||||
@@ -1214,7 +1214,7 @@ pub const JSFrameworkRouter = struct {
|
||||
var next = route.first_child.unwrap();
|
||||
while (next) |r| : (next = jsfr.router.routePtr(r).next_sibling.unwrap())
|
||||
len += 1;
|
||||
const arr = JSValue.createEmptyArray(global, len);
|
||||
const arr = try JSValue.createEmptyArray(global, len);
|
||||
next = route.first_child.unwrap();
|
||||
var i: u32 = 0;
|
||||
while (next) |r| : (next = jsfr.router.routePtr(r).next_sibling.unwrap()) {
|
||||
@@ -1223,12 +1223,12 @@ pub const JSFrameworkRouter = struct {
|
||||
}
|
||||
break :brk arr;
|
||||
},
|
||||
}, global).toJS();
|
||||
}, global)).toJS();
|
||||
}
|
||||
|
||||
fn routeToJsonInverse(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, route_index: Route.Index, allocator: Allocator) !JSValue {
|
||||
const route = jsfr.router.routePtr(route_index);
|
||||
return JSC.JSObject.create(.{
|
||||
return (try JSC.JSObject.create(.{
|
||||
.part = try partToJS(global, route.part, allocator),
|
||||
.page = jsfr.fileIdToJS(global, route.file_page),
|
||||
.layout = jsfr.fileIdToJS(global, route.file_layout),
|
||||
@@ -1237,7 +1237,7 @@ pub const JSFrameworkRouter = struct {
|
||||
try routeToJsonInverse(jsfr, global, parent, allocator)
|
||||
else
|
||||
.null,
|
||||
}, global).toJS();
|
||||
}, global)).toJS();
|
||||
}
|
||||
|
||||
pub fn finalize(this: *JSFrameworkRouter) void {
|
||||
|
||||
@@ -104,13 +104,13 @@ pub const SplitBundlerOptions = struct {
|
||||
.ssr = .{},
|
||||
};
|
||||
|
||||
pub fn parsePluginArray(opts: *SplitBundlerOptions, plugin_array: JSValue, global: *JSC.JSGlobalObject) !void {
|
||||
pub fn parsePluginArray(opts: *SplitBundlerOptions, plugin_array: JSValue, global: *JSC.JSGlobalObject) bun.JSError!void {
|
||||
const plugin = opts.plugin orelse Plugin.create(global, .bun);
|
||||
opts.plugin = plugin;
|
||||
const empty_object = JSValue.createEmptyObject(global, 0);
|
||||
|
||||
var iter = plugin_array.arrayIterator(global);
|
||||
while (iter.next()) |plugin_config| {
|
||||
var iter = try plugin_array.arrayIterator(global);
|
||||
while (try iter.next()) |plugin_config| {
|
||||
if (!plugin_config.isObject()) {
|
||||
return global.throwInvalidArguments("Expected plugin to be an object", .{});
|
||||
}
|
||||
@@ -359,7 +359,7 @@ pub const Framework = struct {
|
||||
refs: *StringRefList,
|
||||
bundler_options: *SplitBundlerOptions,
|
||||
arena: Allocator,
|
||||
) !Framework {
|
||||
) bun.JSError!Framework {
|
||||
if (opts.isString()) {
|
||||
const str = try opts.toBunString(global);
|
||||
defer str.deref();
|
||||
@@ -391,7 +391,7 @@ pub const Framework = struct {
|
||||
break :brk null;
|
||||
|
||||
if (rfr == .true) break :brk .{};
|
||||
if (rfr == .false or rfr == .null or rfr == .undefined) break :brk null;
|
||||
if (rfr == .false or rfr.isUndefinedOrNull()) break :brk null;
|
||||
|
||||
if (!rfr.isObject()) {
|
||||
return global.throwInvalidArguments("'framework.reactFastRefresh' must be an object or 'true'", .{});
|
||||
@@ -411,7 +411,7 @@ pub const Framework = struct {
|
||||
const server_components: ?ServerComponents = sc: {
|
||||
const sc: JSValue = try opts.get(global, "serverComponents") orelse
|
||||
break :sc null;
|
||||
if (sc == .false or sc == .null or sc == .undefined) break :sc null;
|
||||
if (sc == .false or sc.isUndefinedOrNull()) break :sc null;
|
||||
|
||||
if (!sc.isObject()) {
|
||||
return global.throwInvalidArguments("'framework.serverComponents' must be an object or 'undefined'", .{});
|
||||
@@ -446,13 +446,13 @@ pub const Framework = struct {
|
||||
const array = try opts.getArray(global, "builtInModules") orelse
|
||||
break :built_in_modules .{};
|
||||
|
||||
const len = array.getLength(global);
|
||||
const len = try array.getLength(global);
|
||||
var files: bun.StringArrayHashMapUnmanaged(BuiltInModule) = .{};
|
||||
try files.ensureTotalCapacity(arena, len);
|
||||
|
||||
var it = array.arrayIterator(global);
|
||||
var it = try array.arrayIterator(global);
|
||||
var i: usize = 0;
|
||||
while (it.next()) |file| : (i += 1) {
|
||||
while (try it.next()) |file| : (i += 1) {
|
||||
if (!file.isObject()) {
|
||||
return global.throwInvalidArguments("'builtInModules[{d}]' is not an object", .{i});
|
||||
}
|
||||
@@ -477,16 +477,16 @@ pub const Framework = struct {
|
||||
const array: JSValue = try opts.getArray(global, "fileSystemRouterTypes") orelse {
|
||||
return global.throwInvalidArguments("Missing 'framework.fileSystemRouterTypes'", .{});
|
||||
};
|
||||
const len = array.getLength(global);
|
||||
const len = try array.getLength(global);
|
||||
if (len > 256) {
|
||||
return global.throwInvalidArguments("Framework can only define up to 256 file-system router types", .{});
|
||||
}
|
||||
const file_system_router_types = try arena.alloc(FileSystemRouterType, len);
|
||||
|
||||
var it = array.arrayIterator(global);
|
||||
var it = try array.arrayIterator(global);
|
||||
var i: usize = 0;
|
||||
errdefer for (file_system_router_types[0..i]) |*fsr| fsr.style.deinit();
|
||||
while (it.next()) |fsr_opts| : (i += 1) {
|
||||
while (try it.next()) |fsr_opts| : (i += 1) {
|
||||
const root = try getOptionalString(fsr_opts, global, "root", refs, arena) orelse {
|
||||
return global.throwInvalidArguments("'fileSystemRouterTypes[{d}]' is missing 'root'", .{i});
|
||||
};
|
||||
@@ -511,10 +511,10 @@ pub const Framework = struct {
|
||||
break :exts &.{};
|
||||
}
|
||||
} else if (exts_js.isArray()) {
|
||||
var it_2 = exts_js.arrayIterator(global);
|
||||
var it_2 = try exts_js.arrayIterator(global);
|
||||
var i_2: usize = 0;
|
||||
const extensions = try arena.alloc([]const u8, exts_js.getLength(global));
|
||||
while (it_2.next()) |array_item| : (i_2 += 1) {
|
||||
const extensions = try arena.alloc([]const u8, try exts_js.getLength(global));
|
||||
while (try it_2.next()) |array_item| : (i_2 += 1) {
|
||||
const slice = refs.track(try array_item.toSlice(global, arena));
|
||||
if (bun.strings.eqlComptime(slice, "*"))
|
||||
return global.throwInvalidArguments("'extensions' cannot include \"*\" as an extension. Pass \"*\" instead of the array.", .{});
|
||||
@@ -536,10 +536,10 @@ pub const Framework = struct {
|
||||
|
||||
const ignore_dirs: []const []const u8 = if (try fsr_opts.get(global, "ignoreDirs")) |exts_js| exts: {
|
||||
if (exts_js.isArray()) {
|
||||
var it_2 = array.arrayIterator(global);
|
||||
var it_2 = try array.arrayIterator(global);
|
||||
var i_2: usize = 0;
|
||||
const dirs = try arena.alloc([]const u8, len);
|
||||
while (it_2.next()) |array_item| : (i_2 += 1) {
|
||||
while (try it_2.next()) |array_item| : (i_2 += 1) {
|
||||
dirs[i_2] = refs.track(try array_item.toSlice(global, arena));
|
||||
}
|
||||
break :exts dirs;
|
||||
@@ -719,7 +719,7 @@ fn getOptionalString(
|
||||
) !?[]const u8 {
|
||||
const value = try target.get(global, property) orelse
|
||||
return null;
|
||||
if (value == .undefined or value == .null)
|
||||
if (value.isUndefinedOrNull())
|
||||
return null;
|
||||
const str = try value.toBunString(global);
|
||||
return allocations.track(str.toUTF8(arena));
|
||||
|
||||
@@ -141,7 +141,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
var options = switch (config_promise.unwrap(vm.jsc, .mark_handled)) {
|
||||
.pending => unreachable,
|
||||
.fulfilled => |resolved| config: {
|
||||
bun.assert(resolved == .undefined);
|
||||
bun.assert(resolved.isUndefined());
|
||||
const default = BakeGetDefaultExportFromModule(vm.global, config_entry_point_string.toJS(vm.global));
|
||||
|
||||
if (!default.isObject()) {
|
||||
@@ -356,9 +356,9 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
pt.attach();
|
||||
|
||||
// Static site generator
|
||||
const server_render_funcs = JSValue.createEmptyArray(global, router.types.len);
|
||||
const server_param_funcs = JSValue.createEmptyArray(global, router.types.len);
|
||||
const client_entry_urls = JSValue.createEmptyArray(global, router.types.len);
|
||||
const server_render_funcs = try JSValue.createEmptyArray(global, router.types.len);
|
||||
const server_param_funcs = try JSValue.createEmptyArray(global, router.types.len);
|
||||
const client_entry_urls = try JSValue.createEmptyArray(global, router.types.len);
|
||||
|
||||
for (router.types, 0..) |router_type, i| {
|
||||
if (router_type.client_file.unwrap()) |client_file| {
|
||||
@@ -421,12 +421,12 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
|
||||
}
|
||||
|
||||
const route_patterns = JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_nested_files = JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_type_and_flags = JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_source_files = JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_param_info = JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_style_references = JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_patterns = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_nested_files = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_type_and_flags = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_source_files = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_param_info = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
const route_style_references = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
|
||||
|
||||
var params_buf: std.ArrayListUnmanaged([]const u8) = .{};
|
||||
for (navigatable_routes.items, 0..) |route_index, nav_index| {
|
||||
@@ -476,8 +476,8 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
}
|
||||
|
||||
// Fill styles and file_list
|
||||
const styles = JSValue.createEmptyArray(global, css_chunks_count);
|
||||
const file_list = JSValue.createEmptyArray(global, file_count);
|
||||
const styles = try JSValue.createEmptyArray(global, css_chunks_count);
|
||||
const file_list = try JSValue.createEmptyArray(global, file_count);
|
||||
|
||||
next = route.parent.unwrap();
|
||||
file_count = 1;
|
||||
@@ -523,7 +523,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
})));
|
||||
|
||||
if (params_buf.items.len > 0) {
|
||||
const param_info_array = JSValue.createEmptyArray(global, params_buf.items.len);
|
||||
const param_info_array = try JSValue.createEmptyArray(global, params_buf.items.len);
|
||||
for (params_buf.items, 0..) |param, i| {
|
||||
param_info_array.putIndex(global, @intCast(params_buf.items.len - i - 1), bun.String.createUTF8ForJS(global, param));
|
||||
}
|
||||
@@ -572,7 +572,7 @@ fn loadModule(vm: *VirtualMachine, global: *JSC.JSGlobalObject, key: JSValue) !J
|
||||
switch (promise.unwrap(vm.jsc, .mark_handled)) {
|
||||
.pending => unreachable,
|
||||
.fulfilled => |val| {
|
||||
bun.assert(val == .undefined);
|
||||
bun.assert(val.isUndefined());
|
||||
return BakeGetModuleNamespace(global, key);
|
||||
},
|
||||
.rejected => |err| {
|
||||
@@ -766,10 +766,10 @@ pub const PerThread = struct {
|
||||
|
||||
/// After initializing, call `attach`
|
||||
pub fn init(vm: *VirtualMachine, opts: Options) !PerThread {
|
||||
const loaded_files = try bun.bit_set.AutoBitSet.initEmpty(vm.allocator, opts.output_indexes.len);
|
||||
var loaded_files = try bun.bit_set.AutoBitSet.initEmpty(vm.allocator, opts.output_indexes.len);
|
||||
errdefer loaded_files.deinit(vm.allocator);
|
||||
|
||||
const all_server_files = JSValue.createEmptyArray(vm.global, opts.output_indexes.len);
|
||||
const all_server_files = try JSValue.createEmptyArray(vm.global, opts.output_indexes.len);
|
||||
all_server_files.protect();
|
||||
|
||||
return .{
|
||||
|
||||
@@ -26,7 +26,7 @@ pub const BuildMessage = struct {
|
||||
|
||||
pub fn getNotes(this: *BuildMessage, globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
|
||||
const notes = this.msg.notes;
|
||||
const array = JSC.JSValue.createEmptyArray(globalThis, notes.len);
|
||||
const array = try JSC.JSValue.createEmptyArray(globalThis, notes.len);
|
||||
for (notes, 0..) |note, i| {
|
||||
const cloned = try note.clone(bun.default_allocator);
|
||||
array.putIndex(
|
||||
|
||||
@@ -184,8 +184,8 @@ fn messageWithTypeAndLevel_(
|
||||
// if value is not an object/array/iterable, don't print a table and just print it
|
||||
var tabular_data = vals[0];
|
||||
if (tabular_data.isObject()) {
|
||||
const properties = if (len >= 2 and vals[1].jsType().isArray()) vals[1] else JSValue.undefined;
|
||||
var table_printer = TablePrinter.init(
|
||||
const properties: JSValue = if (len >= 2 and vals[1].jsType().isArray()) vals[1] else .js_undefined;
|
||||
var table_printer = try TablePrinter.init(
|
||||
global,
|
||||
level,
|
||||
tabular_data,
|
||||
@@ -277,13 +277,13 @@ pub const TablePrinter = struct {
|
||||
level: MessageLevel,
|
||||
tabular_data: JSValue,
|
||||
properties: JSValue,
|
||||
) TablePrinter {
|
||||
) bun.JSError!TablePrinter {
|
||||
return TablePrinter{
|
||||
.level = level,
|
||||
.globalObject = globalObject,
|
||||
.tabular_data = tabular_data,
|
||||
.properties = properties,
|
||||
.is_iterable = tabular_data.isIterable(globalObject),
|
||||
.is_iterable = try tabular_data.isIterable(globalObject),
|
||||
.jstype = tabular_data.jsType(),
|
||||
.value_formatter = ConsoleObject.Formatter{
|
||||
.remaining_values = &[_]JSValue{},
|
||||
@@ -320,11 +320,11 @@ pub const TablePrinter = struct {
|
||||
};
|
||||
|
||||
/// Compute how much horizontal space will take a JSValue when printed
|
||||
fn getWidthForValue(this: *TablePrinter, value: JSValue) u32 {
|
||||
fn getWidthForValue(this: *TablePrinter, value: JSValue) bun.JSError!u32 {
|
||||
var width: usize = 0;
|
||||
var value_formatter = this.value_formatter;
|
||||
|
||||
const tag = ConsoleObject.Formatter.Tag.get(value, this.globalObject);
|
||||
const tag = try ConsoleObject.Formatter.Tag.get(value, this.globalObject);
|
||||
value_formatter.quote_strings = !(tag.tag == .String or tag.tag == .StringPossiblyFormatted);
|
||||
value_formatter.format(
|
||||
tag,
|
||||
@@ -343,7 +343,7 @@ pub const TablePrinter = struct {
|
||||
}
|
||||
|
||||
/// Update the sizes of the columns for the values of a given row, and create any additional columns as needed
|
||||
fn updateColumnsForRow(this: *TablePrinter, columns: *std.ArrayList(Column), row_key: RowKey, row_value: JSValue) !void {
|
||||
fn updateColumnsForRow(this: *TablePrinter, columns: *std.ArrayList(Column), row_key: RowKey, row_value: JSValue) bun.JSError!void {
|
||||
// update size of "(index)" column
|
||||
const row_key_len: u32 = switch (row_key) {
|
||||
.str => |value| @intCast(value.visibleWidthExcludeANSIColors(false)),
|
||||
@@ -353,10 +353,10 @@ pub const TablePrinter = struct {
|
||||
|
||||
// special handling for Map: column with idx=1 is "Keys"
|
||||
if (this.jstype.isMap()) {
|
||||
const entry_key = row_value.getIndex(this.globalObject, 0);
|
||||
const entry_value = row_value.getIndex(this.globalObject, 1);
|
||||
columns.items[1].width = @max(columns.items[1].width, this.getWidthForValue(entry_key));
|
||||
this.values_col_width = @max(this.values_col_width orelse 0, this.getWidthForValue(entry_value));
|
||||
const entry_key = try row_value.getIndex(this.globalObject, 0);
|
||||
const entry_value = try row_value.getIndex(this.globalObject, 1);
|
||||
columns.items[1].width = @max(columns.items[1].width, try this.getWidthForValue(entry_key));
|
||||
this.values_col_width = @max(this.values_col_width orelse 0, try this.getWidthForValue(entry_value));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -366,8 +366,8 @@ pub const TablePrinter = struct {
|
||||
// - otherwise: iterate the object properties, and create the columns on-demand
|
||||
if (!this.properties.isUndefined()) {
|
||||
for (columns.items[1..]) |*column| {
|
||||
if (row_value.getOwn(this.globalObject, column.name)) |value| {
|
||||
column.width = @max(column.width, this.getWidthForValue(value));
|
||||
if (try row_value.getOwn(this.globalObject, column.name)) |value| {
|
||||
column.width = @max(column.width, try this.getWidthForValue(value));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -399,12 +399,12 @@ pub const TablePrinter = struct {
|
||||
break :brk &columns.items[columns.items.len - 1];
|
||||
};
|
||||
|
||||
column.width = @max(column.width, this.getWidthForValue(value));
|
||||
column.width = @max(column.width, try this.getWidthForValue(value));
|
||||
}
|
||||
}
|
||||
} else if (this.properties.isUndefined()) {
|
||||
// not object -> the value will go to the special "Values" column
|
||||
this.values_col_width = @max(this.values_col_width orelse 1, this.getWidthForValue(row_value));
|
||||
this.values_col_width = @max(this.values_col_width orelse 1, try this.getWidthForValue(row_value));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -452,24 +452,24 @@ pub const TablePrinter = struct {

var value = JSValue.zero;
if (col_idx == 1 and this.jstype.isMap()) { // is the "Keys" column, when iterating a Map?
value = row_value.getIndex(this.globalObject, 0);
value = try row_value.getIndex(this.globalObject, 0);
} else if (col_idx == this.values_col_idx) { // is the "Values" column?
if (this.jstype.isMap()) {
value = row_value.getIndex(this.globalObject, 1);
value = try row_value.getIndex(this.globalObject, 1);
} else if (!row_value.isObject()) {
value = row_value;
}
} else if (row_value.isObject()) {
value = row_value.getOwn(this.globalObject, col.name) orelse JSValue.zero;
value = try row_value.getOwn(this.globalObject, col.name) orelse JSValue.zero;
}

if (value == .zero) {
try writer.writeByteNTimes(' ', col.width + (PADDING * 2));
} else {
const len: u32 = this.getWidthForValue(value);
const len: u32 = try this.getWidthForValue(value);
const needed = col.width -| len;
try writer.writeByteNTimes(' ', PADDING);
const tag = ConsoleObject.Formatter.Tag.get(value, this.globalObject);
const tag = try ConsoleObject.Formatter.Tag.get(value, this.globalObject);
var value_formatter = this.value_formatter;

value_formatter.quote_strings = !(tag.tag == .String or tag.tag == .StringPossiblyFormatted);
@@ -532,8 +532,8 @@ pub const TablePrinter = struct {

// if the "properties" arg was provided, pre-populate the columns
if (!this.properties.isUndefined()) {
var properties_iter = JSC.JSArrayIterator.init(this.properties, globalObject);
while (properties_iter.next()) |value| {
var properties_iter = try JSC.JSArrayIterator.init(this.properties, globalObject);
while (try properties_iter.next()) |value| {
try columns.append(.{
.name = try value.toBunString(globalObject),
});
@@ -838,7 +838,7 @@ pub fn format2(
|
||||
.error_display_level = options.error_display_level,
|
||||
};
|
||||
defer fmt.deinit();
|
||||
const tag = ConsoleObject.Formatter.Tag.get(vals[0], global);
|
||||
const tag = try ConsoleObject.Formatter.Tag.get(vals[0], global);
|
||||
fmt.writeIndent(Writer, writer) catch return;
|
||||
|
||||
if (tag.tag == .String) {
|
||||
@@ -937,7 +937,7 @@ pub fn format2(
|
||||
}
|
||||
any = true;
|
||||
|
||||
tag = ConsoleObject.Formatter.Tag.get(this_value, global);
|
||||
tag = try ConsoleObject.Formatter.Tag.get(this_value, global);
|
||||
if (tag.tag == .String and fmt.remaining_values.len > 0) {
|
||||
tag.tag = .{ .StringPossiblyFormatted = {} };
|
||||
}
|
||||
@@ -959,7 +959,7 @@ pub fn format2(
|
||||
_ = writer.write(" ") catch 0;
|
||||
}
|
||||
any = true;
|
||||
tag = ConsoleObject.Formatter.Tag.get(this_value, global);
|
||||
tag = try ConsoleObject.Formatter.Tag.get(this_value, global);
|
||||
if (tag.tag == .String and fmt.remaining_values.len > 0) {
|
||||
tag.tag = .{ .StringPossiblyFormatted = {} };
|
||||
}
|
||||
@@ -1046,7 +1046,7 @@ pub const Formatter = struct {
|
||||
self.formatter.remaining_values = &[_]JSValue{};
|
||||
}
|
||||
try self.formatter.format(
|
||||
Tag.get(self.value, self.formatter.globalThis),
|
||||
try Tag.get(self.value, self.formatter.globalThis),
|
||||
@TypeOf(writer),
|
||||
writer,
|
||||
self.value,
|
||||
@@ -1181,7 +1181,7 @@ pub const Formatter = struct {
|
||||
cell: JSValue.JSType = JSValue.JSType.Cell,
|
||||
};
|
||||
|
||||
pub fn get(value: JSValue, globalThis: *JSGlobalObject) Result {
|
||||
pub fn get(value: JSValue, globalThis: *JSGlobalObject) bun.JSError!Result {
|
||||
return getAdvanced(value, globalThis, .{ .hide_global = false });
|
||||
}
|
||||
|
||||
@@ -1191,12 +1191,12 @@ pub const Formatter = struct {
|
||||
disable_inspect_custom: bool = false,
|
||||
};
|
||||
|
||||
pub fn getAdvanced(value: JSValue, globalThis: *JSGlobalObject, opts: Options) Result {
|
||||
switch (@intFromEnum(value)) {
|
||||
0, 0xa => return Result{
|
||||
pub fn getAdvanced(value: JSValue, globalThis: *JSGlobalObject, opts: Options) bun.JSError!Result {
|
||||
switch (value) {
|
||||
.zero, .js_undefined => return Result{
|
||||
.tag = .{ .Undefined = {} },
|
||||
},
|
||||
0x2 => return Result{
|
||||
.null => return Result{
|
||||
.tag = .{ .Null = {} },
|
||||
},
|
||||
else => {},
|
||||
@@ -1294,16 +1294,16 @@ pub const Formatter = struct {
|
||||
|
||||
// Is this a react element?
|
||||
if (js_type.isObject() and js_type != .ProxyObject) {
|
||||
if (value.getOwnTruthy(globalThis, "$$typeof")) |typeof_symbol| {
|
||||
if (try value.getOwnTruthy(globalThis, "$$typeof")) |typeof_symbol| {
|
||||
// React 18 and below
|
||||
var react_element_legacy = ZigString.init("react.element");
|
||||
// For React 19 - https://github.com/oven-sh/bun/issues/17223
|
||||
var react_element_transitional = ZigString.init("react.transitional.element");
|
||||
var react_fragment = ZigString.init("react.fragment");
|
||||
|
||||
if (JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_element_legacy), globalThis) or
|
||||
JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_element_transitional), globalThis) or
|
||||
JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_fragment), globalThis))
|
||||
if (try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_element_legacy), globalThis) or
|
||||
try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_element_transitional), globalThis) or
|
||||
try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_fragment), globalThis))
|
||||
{
|
||||
return .{ .tag = .{ .JSX = {} }, .cell = js_type };
|
||||
}
|
||||
@@ -1339,7 +1339,7 @@ pub const Formatter = struct {
|
||||
|
||||
.ProxyObject => tag: {
|
||||
const handler = value.getProxyInternalField(.handler);
|
||||
if (handler == .zero or handler == .undefined or handler == .null) {
|
||||
if (handler == .zero or handler.isUndefinedOrNull()) {
|
||||
break :tag .RevokedProxy;
|
||||
}
|
||||
break :tag .Proxy;
|
||||
@@ -1613,7 +1613,7 @@ pub const Formatter = struct {
|
||||
// > implementation-specific, potentially-interactive representation
|
||||
// > of an object judged to be maximally useful and informative.
|
||||
}
|
||||
try this.format(Tag.get(next_value, global), Writer, writer_, next_value, global, enable_ansi_colors);
|
||||
try this.format(try Tag.get(next_value, global), Writer, writer_, next_value, global, enable_ansi_colors);
|
||||
},
|
||||
|
||||
.c => {
|
||||
@@ -1766,8 +1766,8 @@ pub const Formatter = struct {
|
||||
this.writer.writeAll(" ") catch unreachable;
|
||||
}
|
||||
if (!is_iterator) {
|
||||
const key = nextValue.getIndex(globalObject, 0);
|
||||
const value = nextValue.getIndex(globalObject, 1);
|
||||
const key = nextValue.getIndex(globalObject, 0) catch return;
|
||||
const value = nextValue.getIndex(globalObject, 1) catch return;
|
||||
|
||||
if (!single_line) {
|
||||
this.formatter.writeIndent(Writer, this.writer) catch unreachable;
|
||||
@@ -1775,7 +1775,7 @@ pub const Formatter = struct {
|
||||
const key_tag = Tag.getAdvanced(key, globalObject, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.formatter.disable_inspect_custom,
|
||||
});
|
||||
}) catch return;
|
||||
|
||||
this.formatter.format(
|
||||
key_tag,
|
||||
@@ -1789,7 +1789,7 @@ pub const Formatter = struct {
|
||||
const value_tag = Tag.getAdvanced(value, globalObject, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.formatter.disable_inspect_custom,
|
||||
});
|
||||
}) catch return;
|
||||
this.formatter.format(
|
||||
value_tag,
|
||||
Writer,
|
||||
@@ -1806,7 +1806,7 @@ pub const Formatter = struct {
|
||||
const tag = Tag.getAdvanced(nextValue, globalObject, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.formatter.disable_inspect_custom,
|
||||
});
|
||||
}) catch return;
|
||||
this.formatter.format(
|
||||
tag,
|
||||
Writer,
|
||||
@@ -1847,7 +1847,7 @@ pub const Formatter = struct {
|
||||
const key_tag = Tag.getAdvanced(nextValue, globalObject, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.formatter.disable_inspect_custom,
|
||||
});
|
||||
}) catch return;
|
||||
this.formatter.format(
|
||||
key_tag,
|
||||
Writer,
|
||||
@@ -1924,7 +1924,7 @@ pub const Formatter = struct {
|
||||
const tag = Tag.getAdvanced(value, globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
}) catch return;
|
||||
|
||||
if (tag.cell.isHidden()) return;
|
||||
if (ctx.i == 0) {
|
||||
@@ -2270,7 +2270,7 @@ pub const Formatter = struct {
|
||||
if (result.isString()) {
|
||||
writer.print("{}", .{result.fmtString(this.globalThis)});
|
||||
} else {
|
||||
try this.format(ConsoleObject.Formatter.Tag.get(result, this.globalThis), Writer, writer_, result, this.globalThis, enable_ansi_colors);
|
||||
try this.format(try ConsoleObject.Formatter.Tag.get(result, this.globalThis), Writer, writer_, result, this.globalThis, enable_ansi_colors);
|
||||
}
|
||||
},
|
||||
.Symbol => {
|
||||
@@ -2369,7 +2369,7 @@ pub const Formatter = struct {
|
||||
}
|
||||
},
|
||||
.Array => {
|
||||
const len = value.getLength(this.globalThis);
|
||||
const len = try value.getLength(this.globalThis);
|
||||
|
||||
// TODO: DerivedArray does not get passed along in JSType, and it's not clear why.
|
||||
// if (jsType == .DerivedArray) {
|
||||
@@ -2404,7 +2404,7 @@ pub const Formatter = struct {
|
||||
first: {
|
||||
const element = value.getDirectIndex(this.globalThis, 0);
|
||||
|
||||
const tag = Tag.getAdvanced(element, this.globalThis, .{
|
||||
const tag = try Tag.getAdvanced(element, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -2487,7 +2487,7 @@ pub const Formatter = struct {
|
||||
writer.space();
|
||||
}
|
||||
|
||||
const tag = Tag.getAdvanced(element, this.globalThis, .{
|
||||
const tag = try Tag.getAdvanced(element, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -2534,10 +2534,7 @@ pub const Formatter = struct {
|
||||
.parent = value,
|
||||
.i = i,
|
||||
};
|
||||
value.forEachPropertyNonIndexed(this.globalThis, &iter, Iterator.forEach);
|
||||
if (this.globalThis.hasException()) {
|
||||
return error.JSError;
|
||||
}
|
||||
try value.forEachPropertyNonIndexed(this.globalThis, &iter, Iterator.forEach);
|
||||
if (this.failed) return;
|
||||
}
|
||||
}
|
||||
@@ -2571,7 +2568,7 @@ pub const Formatter = struct {
|
||||
s3client.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {};
|
||||
return;
|
||||
} else if (value.as(bun.webcore.FetchHeaders) != null) {
|
||||
if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| {
|
||||
if (try value.get(this.globalThis, "toJSON")) |toJSONFunction| {
|
||||
this.addForNewLine("Headers ".len);
|
||||
writer.writeAll(comptime Output.prettyFmt("<r>Headers ", enable_ansi_colors));
|
||||
const prev_quote_keys = this.quote_keys;
|
||||
@@ -2589,7 +2586,7 @@ pub const Formatter = struct {
|
||||
);
|
||||
}
|
||||
} else if (value.as(JSC.DOMFormData) != null) {
|
||||
if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| {
|
||||
if (try value.get(this.globalThis, "toJSON")) |toJSONFunction| {
|
||||
const prev_quote_keys = this.quote_keys;
|
||||
this.quote_keys = true;
|
||||
defer this.quote_keys = prev_quote_keys;
|
||||
@@ -2606,7 +2603,7 @@ pub const Formatter = struct {
|
||||
}
|
||||
|
||||
// this case should never happen
|
||||
return try this.printAs(.Undefined, Writer, writer_, .undefined, .Cell, enable_ansi_colors);
|
||||
return try this.printAs(.Undefined, Writer, writer_, .js_undefined, .Cell, enable_ansi_colors);
|
||||
} else if (value.as(bun.api.Timer.TimeoutObject)) |timer| {
|
||||
this.addForNewLine("Timeout(# ) ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(timer.internals.id, 0)))));
|
||||
if (timer.internals.flags.kind == .setInterval) {
|
||||
@@ -2705,7 +2702,7 @@ pub const Formatter = struct {
|
||||
writer.writeAll(comptime Output.prettyFmt("<cyan>" ++ fmt ++ "<r>", enable_ansi_colors));
|
||||
},
|
||||
.Map => {
|
||||
const length_value = value.get_unsafe(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0);
|
||||
const length_value = try value.get(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0);
|
||||
const length = length_value.toInt32();
|
||||
|
||||
const prev_quote_strings = this.quote_strings;
|
||||
@@ -2812,7 +2809,7 @@ pub const Formatter = struct {
|
||||
writer.writeAll("}");
|
||||
},
|
||||
.Set => {
|
||||
const length_value = value.get_unsafe(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0);
|
||||
const length_value = try value.get(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0);
|
||||
const length = length_value.toInt32();
|
||||
|
||||
const prev_quote_strings = this.quote_strings;
|
||||
@@ -2855,7 +2852,7 @@ pub const Formatter = struct {
|
||||
writer.writeAll("}");
|
||||
},
|
||||
.toJSON => {
|
||||
if (value.get_unsafe(this.globalThis, "toJSON")) |func| brk: {
|
||||
if (try value.get(this.globalThis, "toJSON")) |func| brk: {
|
||||
const result = func.call(this.globalThis, value, &.{}) catch {
|
||||
this.globalThis.clearException();
|
||||
break :brk;
|
||||
@@ -2863,7 +2860,7 @@ pub const Formatter = struct {
|
||||
const prev_quote_keys = this.quote_keys;
|
||||
this.quote_keys = true;
|
||||
defer this.quote_keys = prev_quote_keys;
|
||||
const tag = ConsoleObject.Formatter.Tag.get(result, this.globalThis);
|
||||
const tag = try ConsoleObject.Formatter.Tag.get(result, this.globalThis);
|
||||
try this.format(tag, Writer, writer_, result, this.globalThis, enable_ansi_colors);
|
||||
return;
|
||||
}
|
||||
@@ -2895,13 +2892,13 @@ pub const Formatter = struct {
|
||||
writer.print("{}", .{str});
|
||||
},
|
||||
.Event => {
|
||||
const event_type_value = brk: {
|
||||
const value_ = value.get_unsafe(this.globalThis, "type") orelse break :brk JSValue.undefined;
|
||||
const event_type_value: JSValue = brk: {
|
||||
const value_ = try value.get(this.globalThis, "type") orelse break :brk .js_undefined;
|
||||
if (value_.isString()) {
|
||||
break :brk value_;
|
||||
}
|
||||
|
||||
break :brk JSValue.undefined;
|
||||
break :brk .js_undefined;
|
||||
};
|
||||
|
||||
const event_type = switch (try EventType.map.fromJS(this.globalThis, event_type_value) orelse .unknown) {
|
||||
@@ -2949,7 +2946,7 @@ pub const Formatter = struct {
|
||||
.{},
|
||||
);
|
||||
|
||||
const tag = Tag.getAdvanced(message_value, this.globalThis, .{
|
||||
const tag = try Tag.getAdvanced(message_value, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -2972,8 +2969,8 @@ pub const Formatter = struct {
|
||||
comptime Output.prettyFmt("<r><blue>data<d>:<r> ", enable_ansi_colors),
|
||||
.{},
|
||||
);
|
||||
const data = (try value.fastGet(this.globalThis, .data)) orelse JSValue.undefined;
|
||||
const tag = Tag.getAdvanced(data, this.globalThis, .{
|
||||
const data: JSValue = (try value.fastGet(this.globalThis, .data)) orelse .js_undefined;
|
||||
const tag = try Tag.getAdvanced(data, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -2995,7 +2992,7 @@ pub const Formatter = struct {
|
||||
.{},
|
||||
);
|
||||
|
||||
const tag = Tag.getAdvanced(error_value, this.globalThis, .{
|
||||
const tag = try Tag.getAdvanced(error_value, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -3029,8 +3026,8 @@ pub const Formatter = struct {
|
||||
|
||||
defer if (tag_name_slice.isAllocated()) tag_name_slice.deinit();
|
||||
|
||||
if (value.get_unsafe(this.globalThis, "type")) |type_value| {
|
||||
const _tag = Tag.getAdvanced(type_value, this.globalThis, .{
|
||||
if (try value.get(this.globalThis, "type")) |type_value| {
|
||||
const _tag = try Tag.getAdvanced(type_value, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -3062,7 +3059,7 @@ pub const Formatter = struct {
|
||||
writer.writeAll(tag_name_slice.slice());
|
||||
if (enable_ansi_colors) writer.writeAll(comptime Output.prettyFmt("<r>", enable_ansi_colors));
|
||||
|
||||
if (value.get_unsafe(this.globalThis, "key")) |key_value| {
|
||||
if (try value.get(this.globalThis, "key")) |key_value| {
|
||||
if (!key_value.isUndefinedOrNull()) {
|
||||
if (needs_space)
|
||||
writer.writeAll(" key=")
|
||||
@@ -3073,7 +3070,7 @@ pub const Formatter = struct {
|
||||
this.quote_strings = true;
|
||||
defer this.quote_strings = old_quote_strings;
|
||||
|
||||
try this.format(Tag.getAdvanced(key_value, this.globalThis, .{
|
||||
try this.format(try Tag.getAdvanced(key_value, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
}), Writer, writer_, key_value, this.globalThis, enable_ansi_colors);
|
||||
@@ -3082,7 +3079,7 @@ pub const Formatter = struct {
|
||||
}
|
||||
}
|
||||
|
||||
if (value.get_unsafe(this.globalThis, "props")) |props| {
|
||||
if (try value.get(this.globalThis, "props")) |props| {
|
||||
const prev_quote_strings = this.quote_strings;
|
||||
defer this.quote_strings = prev_quote_strings;
|
||||
this.quote_strings = true;
|
||||
@@ -3095,7 +3092,7 @@ pub const Formatter = struct {
|
||||
}).init(this.globalThis, props_obj);
|
||||
defer props_iter.deinit();
|
||||
|
||||
const children_prop = props.get_unsafe(this.globalThis, "children");
|
||||
const children_prop = try props.get(this.globalThis, "children");
|
||||
if (props_iter.len > 0) {
|
||||
{
|
||||
this.indent += 1;
|
||||
@@ -3107,7 +3104,7 @@ pub const Formatter = struct {
|
||||
continue;
|
||||
|
||||
const property_value = props_iter.value;
|
||||
const tag = Tag.getAdvanced(property_value, this.globalThis, .{
|
||||
const tag = try Tag.getAdvanced(property_value, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
});
|
||||
@@ -3156,7 +3153,7 @@ pub const Formatter = struct {
|
||||
}
|
||||
|
||||
if (children_prop) |children| {
|
||||
const tag = Tag.get(children, this.globalThis);
|
||||
const tag = try Tag.get(children, this.globalThis);
|
||||
|
||||
const print_children = switch (tag.tag) {
|
||||
.String, .JSX, .Array => true,
|
||||
@@ -3191,14 +3188,14 @@ pub const Formatter = struct {
|
||||
this.indent += 1;
|
||||
this.writeIndent(Writer, writer_) catch unreachable;
|
||||
defer this.indent -|= 1;
|
||||
try this.format(Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors);
|
||||
try this.format(try Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors);
|
||||
}
|
||||
|
||||
writer.writeAll("\n");
|
||||
this.writeIndent(Writer, writer_) catch unreachable;
|
||||
},
|
||||
.Array => {
|
||||
const length = children.getLength(this.globalThis);
|
||||
const length = try children.getLength(this.globalThis);
|
||||
if (length == 0) break :print_children;
|
||||
writer.writeAll(">\n");
|
||||
|
||||
@@ -3213,8 +3210,8 @@ pub const Formatter = struct {
|
||||
|
||||
var j: usize = 0;
|
||||
while (j < length) : (j += 1) {
|
||||
const child = children.getIndex(this.globalThis, @as(u32, @intCast(j)));
|
||||
try this.format(Tag.getAdvanced(child, this.globalThis, .{
|
||||
const child = try children.getIndex(this.globalThis, @as(u32, @intCast(j)));
|
||||
try this.format(try Tag.getAdvanced(child, this.globalThis, .{
|
||||
.hide_global = true,
|
||||
.disable_inspect_custom = this.disable_inspect_custom,
|
||||
}), Writer, writer_, child, this.globalThis, enable_ansi_colors);
|
||||
@@ -3304,14 +3301,11 @@ pub const Formatter = struct {
|
||||
});
|
||||
return;
|
||||
} else if (this.ordered_properties) {
|
||||
value.forEachPropertyOrdered(this.globalThis, &iter, Iterator.forEach);
|
||||
try value.forEachPropertyOrdered(this.globalThis, &iter, Iterator.forEach);
|
||||
} else {
|
||||
value.forEachProperty(this.globalThis, &iter, Iterator.forEach);
|
||||
try value.forEachProperty(this.globalThis, &iter, Iterator.forEach);
|
||||
}
|
||||
|
||||
if (this.globalThis.hasException()) {
|
||||
return error.JSError;
|
||||
}
|
||||
if (this.failed) return;
|
||||
|
||||
if (iter.i == 0) {
|
||||
@@ -3464,7 +3458,7 @@ pub const Formatter = struct {
|
||||
}
|
||||
// TODO: if (options.showProxy), print like `Proxy { target: ..., handlers: ... }`
|
||||
// this is default off so it is not used.
|
||||
try this.format(ConsoleObject.Formatter.Tag.get(target, this.globalThis), Writer, writer_, target, this.globalThis, enable_ansi_colors);
|
||||
try this.format(try ConsoleObject.Formatter.Tag.get(target, this.globalThis), Writer, writer_, target, this.globalThis, enable_ansi_colors);
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -3654,7 +3648,7 @@ pub fn timeLog(
|
||||
var writer = console.error_writer.writer();
|
||||
const Writer = @TypeOf(writer);
|
||||
for (args[0..args_len]) |arg| {
|
||||
const tag = ConsoleObject.Formatter.Tag.get(arg, global);
|
||||
const tag = ConsoleObject.Formatter.Tag.get(arg, global) catch return;
|
||||
_ = writer.write(" ") catch 0;
|
||||
if (Output.enable_ansi_colors_stderr) {
|
||||
fmt.format(tag, Writer, writer, arg, global, true) catch {}; // TODO:
|
||||
|
||||
@@ -5,9 +5,8 @@ pub fn mark(this: *Counters, comptime tag: Field) void {
|
||||
@field(this, @tagName(tag)) +|= 1;
|
||||
}
|
||||
|
||||
pub fn toJS(this: *const Counters, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
|
||||
const obj = JSC.JSObject.create(this.*, globalObject);
|
||||
return obj.toJS();
|
||||
pub fn toJS(this: *const Counters, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue {
|
||||
return (try JSC.JSObject.create(this.*, globalObject)).toJS();
|
||||
}
|
||||
|
||||
pub fn createCountersObject(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
|
||||
@@ -429,7 +429,7 @@ pub const AsyncModule = struct {
|
||||
errorable = JSC.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| {
|
||||
switch (err) {
|
||||
error.JSError => {
|
||||
errorable = .err(error.JSError, this.globalThis.takeError(error.JSError).asVoid());
|
||||
errorable = .err(error.JSError, this.globalThis.takeError(error.JSError));
|
||||
break :outer;
|
||||
},
|
||||
else => {
|
||||
@@ -468,13 +468,16 @@ pub const AsyncModule = struct {
|
||||
specifier_: bun.String,
|
||||
referrer_: bun.String,
|
||||
log: *logger.Log,
|
||||
) void {
|
||||
) bun.JSExecutionTerminated!void {
|
||||
JSC.markBinding(@src());
|
||||
var specifier = specifier_;
|
||||
var referrer = referrer_;
|
||||
var scope: JSC.CatchScope = undefined;
|
||||
scope.init(globalThis, @src(), .enabled);
|
||||
defer {
|
||||
specifier.deref();
|
||||
referrer.deref();
|
||||
scope.deinit();
|
||||
}
|
||||
|
||||
var errorable: JSC.ErrorableResolvedSource = undefined;
|
||||
@@ -487,7 +490,7 @@ pub const AsyncModule = struct {
|
||||
}
|
||||
|
||||
if (e == error.JSError) {
|
||||
errorable = JSC.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError).asVoid());
|
||||
errorable = JSC.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError));
|
||||
} else {
|
||||
VirtualMachine.processFetchLog(
|
||||
globalThis,
|
||||
@@ -512,6 +515,7 @@ pub const AsyncModule = struct {
|
||||
&specifier,
|
||||
&referrer,
|
||||
);
|
||||
try scope.assertNoExceptionExceptTermination();
|
||||
}
|
||||
|
||||
pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void {
|
||||
@@ -1026,6 +1030,8 @@ pub fn transpileSourceCode(
|
||||
},
|
||||
};
|
||||
|
||||
const source = &parse_result.source;
|
||||
|
||||
if (parse_result.loader == .wasm) {
|
||||
return transpileSourceCode(
|
||||
jsc_vm,
|
||||
@@ -1071,7 +1077,7 @@ pub fn transpileSourceCode(
|
||||
if (loader == .json) {
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = bun.String.createUTF8(parse_result.source.contents),
|
||||
.source_code = bun.String.createUTF8(source.contents),
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = ResolvedSource.Tag.json_for_object_loader,
|
||||
@@ -1082,8 +1088,8 @@ pub fn transpileSourceCode(
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = switch (comptime flags) {
|
||||
.print_source_and_clone => bun.String.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable),
|
||||
.print_source => bun.String.init(parse_result.source.contents),
|
||||
.print_source_and_clone => bun.String.init(jsc_vm.allocator.dupe(u8, source.contents) catch unreachable),
|
||||
.print_source => bun.String.init(source.contents),
|
||||
else => @compileError("unreachable"),
|
||||
},
|
||||
.specifier = input_specifier,
|
||||
@@ -1115,7 +1121,7 @@ pub fn transpileSourceCode(
|
||||
const bytecode_slice = parse_result.already_bundled.bytecodeSlice();
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = bun.String.createLatin1(parse_result.source.contents),
|
||||
.source_code = bun.String.createLatin1(source.contents),
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.already_bundled = true,
|
||||
@@ -1127,7 +1133,7 @@ pub fn transpileSourceCode(
|
||||
|
||||
if (parse_result.empty) {
|
||||
const was_cjs = (loader == .js or loader == .ts) and brk: {
|
||||
const ext = std.fs.path.extension(parse_result.source.path.text);
|
||||
const ext = std.fs.path.extension(source.path.text);
|
||||
break :brk strings.eqlComptime(ext, ".cjs") or strings.eqlComptime(ext, ".cts");
|
||||
};
|
||||
if (was_cjs) {
|
||||
@@ -1143,7 +1149,7 @@ pub fn transpileSourceCode(
|
||||
}
|
||||
|
||||
if (cache.entry) |*entry| {
|
||||
jsc_vm.source_mappings.putMappings(parse_result.source, .{
|
||||
jsc_vm.source_mappings.putMappings(source, .{
|
||||
.list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len },
|
||||
.allocator = bun.default_allocator,
|
||||
}) catch {};
|
||||
@@ -1167,10 +1173,10 @@ pub fn transpileSourceCode(
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.is_commonjs_module = entry.metadata.module_type == .cjs,
|
||||
.tag = brk: {
|
||||
if (entry.metadata.module_type == .cjs and parse_result.source.path.isFile()) {
|
||||
if (entry.metadata.module_type == .cjs and source.path.isFile()) {
|
||||
const actual_package_json: *PackageJSON = package_json orelse brk2: {
|
||||
// this should already be cached virtually always so it's fine to do this
|
||||
const dir_info = (jsc_vm.transpiler.resolver.readDirInfo(parse_result.source.path.name.dir) catch null) orelse
|
||||
const dir_info = (jsc_vm.transpiler.resolver.readDirInfo(source.path.name.dir) catch null) orelse
|
||||
break :brk .javascript;
|
||||
|
||||
break :brk2 dir_info.package_json orelse dir_info.enclosing_package_json;
|
||||
@@ -1204,7 +1210,7 @@ pub fn transpileSourceCode(
|
||||
return error.UnexpectedPendingResolution;
|
||||
}
|
||||
|
||||
if (parse_result.source.contents_is_recycled) {
|
||||
if (source.contents_is_recycled) {
|
||||
// this shared buffer is about to become owned by the AsyncModule struct
|
||||
jsc_vm.transpiler.resolver.caches.fs.resetSharedBuffer(
|
||||
jsc_vm.transpiler.resolver.caches.fs.sharedBuffer(),
|
||||
@@ -1334,7 +1340,7 @@ pub fn transpileSourceCode(
|
||||
|
||||
// return ResolvedSource{
|
||||
// .allocator = if (jsc_vm.has_loaded) &jsc_vm.allocator else null,
|
||||
// .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable),
|
||||
// .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, source.contents) catch unreachable),
|
||||
// .specifier = ZigString.init(specifier),
|
||||
// .source_url = input_specifier.createIfDifferent(path.text),
|
||||
// .tag = ResolvedSource.Tag.wasm,
|
||||
@@ -1614,7 +1620,7 @@ pub export fn Bun__transpileFile(
|
||||
var virtual_source_to_use: ?logger.Source = null;
|
||||
var blob_to_deinit: ?JSC.WebCore.Blob = null;
|
||||
var lr = options.getLoaderAndVirtualSource(_specifier.slice(), jsc_vm, &virtual_source_to_use, &blob_to_deinit, type_attribute_str) catch {
|
||||
ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.ERR(.MODULE_NOT_FOUND, "Blob not found", .{}).toJS().asVoid());
|
||||
ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.ERR(.MODULE_NOT_FOUND, "Blob not found", .{}).toJS());
|
||||
return null;
|
||||
};
|
||||
defer if (blob_to_deinit) |*blob| blob.deinit();
|
||||
@@ -1813,7 +1819,7 @@ pub export fn Bun__transpileFile(
|
||||
},
|
||||
error.PluginError => return null,
|
||||
error.JSError => {
|
||||
ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError).asVoid());
|
||||
ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError));
|
||||
return null;
|
||||
},
|
||||
else => {
|
||||
@@ -1989,7 +1995,7 @@ export fn Bun__transpileVirtualModule(
|
||||
switch (err) {
|
||||
error.PluginError => return true,
|
||||
error.JSError => {
|
||||
ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError).asVoid());
|
||||
ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError));
|
||||
return true;
|
||||
},
|
||||
else => {
|
||||
@@ -2138,12 +2144,12 @@ pub const RuntimeTranspilerStore = struct {
|
||||
}
|
||||
|
||||
// This is run at the top of the event loop on the JS thread.
|
||||
pub fn drain(this: *RuntimeTranspilerStore) void {
|
||||
pub fn drain(this: *RuntimeTranspilerStore) bun.JSExecutionTerminated!void {
|
||||
var batch = this.queue.popBatch();
|
||||
var iter = batch.iterator();
|
||||
if (iter.next()) |job| {
|
||||
// we run just one job first to see if there are more
|
||||
job.runFromJSThread();
|
||||
try job.runFromJSThread();
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
@@ -2153,8 +2159,8 @@ pub const RuntimeTranspilerStore = struct {
|
||||
const jsc_vm = vm.jsc;
|
||||
while (iter.next()) |job| {
|
||||
// if there are more, we need to drain the microtasks from the previous run
|
||||
event_loop.drainMicrotasksWithGlobal(global, jsc_vm);
|
||||
job.runFromJSThread();
|
||||
try event_loop.drainMicrotasksWithGlobal(global, jsc_vm);
|
||||
try job.runFromJSThread();
|
||||
}
|
||||
|
||||
// immediately after this is called, the microtasks will be drained again.
|
||||
@@ -2261,7 +2267,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
this.vm.eventLoop().enqueueTaskConcurrent(JSC.ConcurrentTask.createFrom(&this.vm.transpiler_store));
|
||||
}
|
||||
|
||||
pub fn runFromJSThread(this: *TranspilerJob) void {
|
||||
pub fn runFromJSThread(this: *TranspilerJob) bun.JSExecutionTerminated!void {
|
||||
var vm = this.vm;
|
||||
const promise = this.promise.swap();
|
||||
const globalThis = this.globalThis;
|
||||
@@ -2294,7 +2300,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
|
||||
_ = vm.transpiler_store.store.put(this);
|
||||
|
||||
ModuleLoader.AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log);
|
||||
try ModuleLoader.AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log);
|
||||
}
|
||||
|
||||
pub fn schedule(this: *TranspilerJob) void {
|
||||
@@ -2482,7 +2488,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
}
|
||||
|
||||
if (cache.entry) |*entry| {
|
||||
vm.source_mappings.putMappings(parse_result.source, .{
|
||||
vm.source_mappings.putMappings(&parse_result.source, .{
|
||||
.list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len },
|
||||
.allocator = bun.default_allocator,
|
||||
}) catch {};
|
||||
@@ -2632,7 +2638,6 @@ pub const FetchFlags = enum {
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
pub const HardcodedModule = enum {
|
||||
bun,
|
||||
@"abort-controller",
|
||||
|
||||
@@ -41,6 +41,7 @@ pub fn kill(this: *ProcessAutoKiller) Result {
|
||||
fn killProcesses(this: *ProcessAutoKiller) u32 {
|
||||
var count: u32 = 0;
|
||||
while (this.processes.pop()) |process| {
|
||||
defer process.key.deref();
|
||||
if (!process.key.hasExited()) {
|
||||
log("process.kill {d}", .{process.key.pid});
|
||||
count += @as(u32, @intFromBool(process.key.kill(@intFromEnum(bun.SignalCode.default)) == .result));
|
||||
@@ -50,6 +51,10 @@ fn killProcesses(this: *ProcessAutoKiller) u32 {
|
||||
}
|
||||
|
||||
pub fn clear(this: *ProcessAutoKiller) void {
|
||||
for (this.processes.keys()) |process| {
|
||||
process.deref();
|
||||
}
|
||||
|
||||
if (this.processes.capacity() > 256) {
|
||||
this.processes.clearAndFree(bun.default_allocator);
|
||||
}
|
||||
@@ -58,15 +63,23 @@ pub fn clear(this: *ProcessAutoKiller) void {
|
||||
}
|
||||
|
||||
pub fn onSubprocessSpawn(this: *ProcessAutoKiller, process: *bun.spawn.Process) void {
|
||||
if (this.enabled)
|
||||
this.processes.put(bun.default_allocator, process, {}) catch {};
|
||||
if (this.enabled) {
|
||||
this.processes.put(bun.default_allocator, process, {}) catch return;
|
||||
process.ref();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn onSubprocessExit(this: *ProcessAutoKiller, process: *bun.spawn.Process) void {
|
||||
if (this.ever_enabled)
|
||||
_ = this.processes.swapRemove(process);
|
||||
if (this.ever_enabled) {
|
||||
if (this.processes.swapRemove(process)) {
|
||||
process.deref();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deinit(this: *ProcessAutoKiller) void {
|
||||
for (this.processes.keys()) |process| {
|
||||
process.deref();
|
||||
}
|
||||
this.processes.deinit(bun.default_allocator);
|
||||
}
|
||||
|
||||
@@ -45,6 +45,7 @@ pub const ResolveMessage = struct {
|
||||
else
|
||||
break :brk "ERR_MODULE_NOT_FOUND",
|
||||
|
||||
.html_manifest,
|
||||
.entry_point_run,
|
||||
.entry_point_build,
|
||||
.at,
|
||||
@@ -60,7 +61,7 @@ pub const ResolveMessage = struct {
|
||||
defer atom.deref();
|
||||
return atom.toJS(globalObject);
|
||||
},
|
||||
else => return .undefined,
|
||||
else => return .js_undefined,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,8 @@
|
||||
/// Version 11: Fix \uFFFF printing regression
|
||||
/// Version 12: "use strict"; makes it CommonJS if we otherwise don't know which one to pick.
|
||||
/// Version 13: Hoist `import.meta.require` definition, see #15738
|
||||
const expected_version = 13;
|
||||
/// Version 14: Updated global defines table list.
|
||||
const expected_version = 14;
|
||||
|
||||
const bun = @import("bun");
|
||||
const std = @import("std");
|
||||
|
||||
@@ -130,7 +130,7 @@ pub fn removeZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *a
|
||||
|
||||
pub const HashTable = std.HashMap(u64, *anyopaque, bun.IdentityContext(u64), 80);
|
||||
|
||||
pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void {
|
||||
pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void {
|
||||
try this.putMappings(source, chunk.buffer);
|
||||
}
|
||||
|
||||
@@ -159,7 +159,7 @@ pub fn deinit(this: *SavedSourceMap) void {
|
||||
this.map.deinit();
|
||||
}
|
||||
|
||||
pub fn putMappings(this: *SavedSourceMap, source: logger.Source, mappings: MutableString) !void {
|
||||
pub fn putMappings(this: *SavedSourceMap, source: *const logger.Source, mappings: MutableString) !void {
|
||||
try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, mappings.list.items.ptr)));
|
||||
}
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ comptime {
|
||||
@export(&specifierIsEvalEntryPoint, .{ .name = "Bun__VM__specifierIsEvalEntryPoint" });
|
||||
@export(&string_allocation_limit, .{ .name = "Bun__stringSyntheticAllocationLimit" });
|
||||
@export(&allowAddons, .{ .name = "Bun__VM__allowAddons" });
|
||||
@export(&allowRejectionHandledWarning, .{ .name = "Bun__VM__allowRejectionHandledWarning" });
|
||||
}
|
||||
|
||||
global: *JSGlobalObject,
|
||||
@@ -196,6 +197,15 @@ pub const OnException = fn (*ZigException) void;
|
||||
pub fn allowAddons(this: *VirtualMachine) callconv(.c) bool {
|
||||
return if (this.transpiler.options.transform_options.allow_addons) |allow_addons| allow_addons else true;
|
||||
}
|
||||
pub fn allowRejectionHandledWarning(this: *VirtualMachine) callconv(.C) bool {
|
||||
return this.unhandledRejectionsMode() != .bun;
|
||||
}
|
||||
pub fn unhandledRejectionsMode(this: *VirtualMachine) Api.UnhandledRejections {
|
||||
return this.transpiler.options.transform_options.unhandled_rejections orelse switch (bun.FeatureFlags.breaking_changes_1_3) {
|
||||
false => .bun,
|
||||
true => .throw,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn initRequestBodyValue(this: *VirtualMachine, body: JSC.WebCore.Body.Value) !*Body.Value.HiveRef {
|
||||
return .init(body, &this.body_value_hive_allocator);
|
||||
@@ -349,7 +359,7 @@ const SourceMapHandlerGetter = struct {
|
||||
/// When the inspector is enabled, we want to generate an inline sourcemap.
|
||||
/// And, for now, we also store it in source_mappings like normal
|
||||
/// This is hideously expensive memory-wise...
|
||||
pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void {
|
||||
pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void {
|
||||
var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator);
|
||||
defer temp_json_buffer.deinit();
|
||||
temp_json_buffer = try chunk.printSourceMapContentsAtOffset(source, temp_json_buffer, true, SavedSourceMap.vlq_offset, true);
|
||||
@@ -496,25 +506,117 @@ pub fn loadExtraEnvAndSourceCodePrinter(this: *VirtualMachine) void {
|
||||
|
||||
extern fn Bun__handleUncaughtException(*JSGlobalObject, err: JSValue, is_rejection: c_int) c_int;
|
||||
extern fn Bun__handleUnhandledRejection(*JSGlobalObject, reason: JSValue, promise: JSValue) c_int;
|
||||
extern fn Bun__wrapUnhandledRejectionErrorForUncaughtException(*JSGlobalObject, reason: JSValue) JSValue;
|
||||
extern fn Bun__emitHandledPromiseEvent(*JSGlobalObject, promise: JSValue) bool;
|
||||
extern fn Bun__promises__isErrorLike(*JSGlobalObject, reason: JSValue) bool;
|
||||
extern fn Bun__promises__emitUnhandledRejectionWarning(*JSGlobalObject, reason: JSValue, promise: JSValue) void;
|
||||
extern fn Bun__noSideEffectsToString(vm: *JSC.VM, globalObject: *JSGlobalObject, reason: JSValue) JSValue;
|
||||
|
||||
pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) bool {
|
||||
fn isErrorLike(globalObject: *JSGlobalObject, reason: JSValue) bun.JSError!bool {
|
||||
const result = Bun__promises__isErrorLike(globalObject, reason);
|
||||
if (globalObject.hasException()) return error.JSError;
|
||||
return result;
|
||||
}
|
||||
|
||||
fn wrapUnhandledRejectionErrorForUncaughtException(globalObject: *JSGlobalObject, reason: JSValue) JSValue {
|
||||
if (isErrorLike(globalObject, reason) catch blk: {
|
||||
if (globalObject.hasException()) globalObject.clearException();
|
||||
break :blk false;
|
||||
}) return reason;
|
||||
const reasonStr = Bun__noSideEffectsToString(globalObject.vm(), globalObject, reason);
|
||||
if (globalObject.hasException()) globalObject.clearException();
|
||||
const msg = "This error originated either by throwing inside of an async function without a catch block, " ++
|
||||
"or by rejecting a promise which was not handled with .catch(). The promise rejected with the reason \"" ++
|
||||
"{s}" ++
|
||||
"\".";
|
||||
if (reasonStr.isString()) {
|
||||
return globalObject.ERR(.UNHANDLED_REJECTION, msg, .{reasonStr.asString().view(globalObject)}).toJS();
|
||||
}
|
||||
return globalObject.ERR(.UNHANDLED_REJECTION, msg, .{"undefined"}).toJS();
|
||||
}
|
||||
|
||||
pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) void {
|
||||
if (this.isShuttingDown()) {
|
||||
Output.debugWarn("unhandledRejection during shutdown.", .{});
|
||||
return true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (isBunTest) {
|
||||
this.unhandled_error_counter += 1;
|
||||
this.onUnhandledRejection(this, globalObject, reason);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (this.unhandledRejectionsMode()) {
|
||||
.bun => {
|
||||
if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return;
|
||||
// continue to default handler
|
||||
},
|
||||
.none => {
|
||||
defer this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => {}, // we are returning anyway
|
||||
};
|
||||
if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return;
|
||||
return; // ignore the unhandled rejection
|
||||
},
|
||||
.warn => {
|
||||
defer this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => {}, // we are returning anyway
|
||||
};
|
||||
_ = Bun__handleUnhandledRejection(globalObject, reason, promise);
|
||||
Bun__promises__emitUnhandledRejectionWarning(globalObject, reason, promise);
|
||||
return;
|
||||
},
|
||||
.warn_with_error_code => {
|
||||
defer this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => {}, // we are returning anyway
|
||||
};
|
||||
if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return;
|
||||
Bun__promises__emitUnhandledRejectionWarning(globalObject, reason, promise);
|
||||
this.exit_handler.exit_code = 1;
|
||||
return;
|
||||
},
|
||||
.strict => {
|
||||
defer this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => {}, // we are returning anyway
|
||||
};
|
||||
const wrapped_reason = wrapUnhandledRejectionErrorForUncaughtException(globalObject, reason);
|
||||
_ = this.uncaughtException(globalObject, wrapped_reason, true);
|
||||
if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return;
|
||||
Bun__promises__emitUnhandledRejectionWarning(globalObject, reason, promise);
|
||||
return;
|
||||
},
|
||||
.throw => {
|
||||
if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) {
|
||||
this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => {}, // we are returning anyway
|
||||
};
|
||||
return;
|
||||
}
|
||||
const wrapped_reason = wrapUnhandledRejectionErrorForUncaughtException(globalObject, reason);
|
||||
if (this.uncaughtException(globalObject, wrapped_reason, true)) {
|
||||
this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => {}, // we are returning anyway
|
||||
};
|
||||
return;
|
||||
}
|
||||
// continue to default handler
|
||||
this.eventLoop().drainMicrotasks() catch |e| switch (e) {
|
||||
error.JSExecutionTerminated => return,
|
||||
};
|
||||
},
|
||||
}
|
||||
this.unhandled_error_counter += 1;
|
||||
this.onUnhandledRejection(this, globalObject, reason);
|
||||
return;
|
||||
}
|
||||
|
||||
pub fn handledPromise(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, promise: JSValue) bool {
|
||||
if (this.isShuttingDown()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const handled = Bun__handleUnhandledRejection(globalObject, reason, promise) > 0;
|
||||
if (!handled) {
|
||||
this.unhandled_error_counter += 1;
|
||||
this.onUnhandledRejection(this, globalObject, reason);
|
||||
}
|
||||
return handled;
|
||||
return Bun__emitHandledPromiseEvent(globalObject, promise);
|
||||
}
|
||||
|
||||
pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, err: JSValue, is_rejection: bool) bool {
|
||||
@@ -554,7 +656,7 @@ pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSGlobalObjec
|
||||
pub fn handlePendingInternalPromiseRejection(this: *JSC.VirtualMachine) void {
|
||||
var promise = this.pending_internal_promise.?;
|
||||
if (promise.status(this.global.vm()) == .rejected and !promise.isHandled(this.global.vm())) {
|
||||
_ = this.unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue());
|
||||
this.unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue());
|
||||
promise.setHandled(this.global.vm());
|
||||
}
|
||||
}
|
||||
@@ -1578,7 +1680,7 @@ pub fn resolveMaybeNeedsTrailingSlash(
|
||||
printed,
|
||||
),
|
||||
};
|
||||
res.* = ErrorableString.err(error.NameTooLong, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice())).asVoid());
|
||||
res.* = ErrorableString.err(error.NameTooLong, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice())));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1668,7 +1770,7 @@ pub fn resolveMaybeNeedsTrailingSlash(
|
||||
};
|
||||
|
||||
{
|
||||
res.* = ErrorableString.err(err, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice())).asVoid());
|
||||
res.* = ErrorableString.err(err, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice())));
|
||||
}
|
||||
|
||||
return;
|
||||
@@ -1686,11 +1788,12 @@ pub const main_file_name: string = "bun:main";
|
||||
pub export fn Bun__drainMicrotasksFromJS(globalObject: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
|
||||
_ = callframe; // autofix
|
||||
globalObject.bunVM().drainMicrotasks();
|
||||
return .undefined;
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
pub fn drainMicrotasks(this: *VirtualMachine) void {
|
||||
this.eventLoop().drainMicrotasks();
|
||||
// TODO: properly propagate exception upwards
|
||||
this.eventLoop().drainMicrotasks() catch {};
|
||||
}
|
||||
|
||||
pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, referrer: bun.String, log: *logger.Log, ret: *ErrorableResolvedSource, err: anyerror) void {
|
||||
@@ -1712,7 +1815,7 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer
|
||||
};
|
||||
};
|
||||
{
|
||||
ret.* = ErrorableResolvedSource.err(err, (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e)).asVoid());
|
||||
ret.* = ErrorableResolvedSource.err(err, (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e)));
|
||||
}
|
||||
return;
|
||||
},
|
||||
@@ -1720,13 +1823,13 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer
|
||||
1 => {
|
||||
const msg = log.msgs.items[0];
|
||||
ret.* = ErrorableResolvedSource.err(err, switch (msg.metadata) {
|
||||
.build => (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e)).asVoid(),
|
||||
.build => (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e)),
|
||||
.resolve => (bun.api.ResolveMessage.create(
|
||||
globalThis,
|
||||
globalThis.allocator(),
|
||||
msg,
|
||||
referrer.toUTF8(bun.default_allocator).slice(),
|
||||
) catch |e| globalThis.takeException(e)).asVoid(),
|
||||
) catch |e| globalThis.takeException(e)),
|
||||
});
|
||||
return;
|
||||
},
|
||||
@@ -1759,7 +1862,7 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer
|
||||
specifier,
|
||||
}) catch unreachable,
|
||||
),
|
||||
).asVoid(),
|
||||
),
|
||||
);
|
||||
},
|
||||
}
|
||||
@@ -1828,8 +1931,7 @@ pub noinline fn runErrorHandler(this: *VirtualMachine, result: JSValue, exceptio
|
||||
|
||||
const writer = buffered_writer.writer();
|
||||
|
||||
if (result.isException(this.global.vm())) {
|
||||
const exception = @as(*Exception, @ptrCast(result.asVoid()));
|
||||
if (result.asException(this.jsc)) |exception| {
|
||||
this.printException(
|
||||
exception,
|
||||
exception_list,
|
||||
@@ -1905,7 +2007,7 @@ fn loadPreloads(this: *VirtualMachine) !?*JSInternalPromise {
|
||||
return error.ModuleNotFound;
|
||||
},
|
||||
};
|
||||
var promise = JSModuleLoader.import(this.global, &String.fromBytes(result.path().?.text));
|
||||
var promise = try JSModuleLoader.import(this.global, &String.fromBytes(result.path().?.text));
|
||||
|
||||
this.pending_internal_promise = promise;
|
||||
JSValue.fromCell(promise).protect();
|
||||
@@ -2345,7 +2447,7 @@ fn printErrorFromMaybePrivateData(
|
||||
pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *Exception) JSValue {
|
||||
var jsc_vm = globalObject.bunVM();
|
||||
_ = jsc_vm.uncaughtException(globalObject, exception.value(), false);
|
||||
return .undefined;
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
pub fn printStackTrace(comptime Writer: type, writer: Writer, trace: ZigStackTrace, comptime allow_ansi_colors: bool) !void {
|
||||
@@ -3009,7 +3111,7 @@ fn printErrorInstance(
|
||||
}
|
||||
|
||||
formatter.format(
|
||||
JSC.Formatter.Tag.getAdvanced(
|
||||
try JSC.Formatter.Tag.getAdvanced(
|
||||
value,
|
||||
this.global,
|
||||
.{ .disable_inspect_custom = true, .hide_global = true },
|
||||
@@ -3050,7 +3152,7 @@ fn printErrorInstance(
|
||||
|
||||
// "cause" is not enumerable, so the above loop won't see it.
|
||||
if (!saw_cause) {
|
||||
if (error_instance.getOwn(this.global, "cause")) |cause| {
|
||||
if (try error_instance.getOwn(this.global, "cause")) |cause| {
|
||||
if (cause.jsType() == .ErrorInstance) {
|
||||
cause.protect();
|
||||
try errors_to_append.append(cause);
|
||||
@@ -3059,7 +3161,7 @@ fn printErrorInstance(
|
||||
}
|
||||
} else if (mode == .js and error_instance != .zero) {
|
||||
// If you do reportError([1,2,3]) we should still show something at least.
|
||||
const tag = JSC.Formatter.Tag.getAdvanced(
|
||||
const tag = try JSC.Formatter.Tag.getAdvanced(
|
||||
error_instance,
|
||||
this.global,
|
||||
.{ .disable_inspect_custom = true, .hide_global = true },
|
||||
@@ -3382,7 +3484,7 @@ pub const IPCInstance = struct {
|
||||
Process__emitDisconnectEvent(vm.global);
|
||||
event_loop.exit();
|
||||
if (Environment.isPosix) {
|
||||
uws.us_socket_context_free(0, this.context);
|
||||
this.context.deinit(false);
|
||||
}
|
||||
vm.channel_ref.disable();
|
||||
}
|
||||
@@ -3412,7 +3514,7 @@ pub fn getIPCInstance(this: *VirtualMachine) ?*IPCInstance {
|
||||
|
||||
const instance = switch (Environment.os) {
|
||||
else => instance: {
|
||||
const context = uws.us_create_bun_nossl_socket_context(this.event_loop_handle.?, @sizeOf(usize)).?;
|
||||
const context = uws.SocketContext.createNoSSLContext(this.event_loop_handle.?, @sizeOf(usize)).?;
|
||||
IPC.Socket.configure(context, true, *IPC.SendQueue, IPC.IPCHandlers.PosixSocket);
|
||||
|
||||
var instance = IPCInstance.new(.{
|
||||
|
||||
@@ -28,8 +28,8 @@ pub const JSBundler = @import("api/JSBundler.zig").JSBundler;
|
||||
pub const JSTranspiler = @import("api/JSTranspiler.zig");
|
||||
pub const Listener = @import("api/bun/socket.zig").Listener;
|
||||
pub const MatchedRoute = @import("api/filesystem_router.zig").MatchedRoute;
|
||||
pub const NativeBrotli = @import("node/node_zlib_binding.zig").SNativeBrotli;
|
||||
pub const NativeZlib = @import("node/node_zlib_binding.zig").SNativeZlib;
|
||||
pub const NativeBrotli = @import("node/zlib/NativeBrotli.zig");
|
||||
pub const NativeZlib = @import("node/zlib/NativeZlib.zig");
|
||||
pub const NodeHTTPResponse = @import("api/server.zig").NodeHTTPResponse;
|
||||
pub const Postgres = @import("../sql/postgres.zig");
|
||||
pub const ResolveMessage = @import("ResolveMessage.zig").ResolveMessage;
|
||||
@@ -44,6 +44,7 @@ pub const SocketHandlers = @import("api/bun/socket.zig").Handlers;
|
||||
pub const UDPSocket = @import("api/bun/udp_socket.zig").UDPSocket;
|
||||
pub const Valkey = @import("../valkey/js_valkey.zig").JSValkeyClient;
|
||||
pub const BlockList = @import("./node/net/BlockList.zig");
|
||||
pub const NativeZstd = @import("./node/zlib/NativeZstd.zig");
|
||||
|
||||
pub const napi = @import("../napi/napi.zig");
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ pub const BunObject = struct {
|
||||
fn toJSGetter(comptime getter: anytype) LazyPropertyCallback {
|
||||
return struct {
|
||||
pub fn callback(this: *JSC.JSGlobalObject, object: *JSC.JSObject) callconv(JSC.conv) JSValue {
|
||||
return @call(.always_inline, getter, .{ this, object });
|
||||
return bun.jsc.toJSHostCall(this, @src(), getter, .{ this, object });
|
||||
}
|
||||
}.callback;
|
||||
}
|
||||
@@ -356,7 +356,7 @@ pub fn inspectTable(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame)
|
||||
|
||||
if (!arguments[1].isArray()) {
|
||||
arguments[2] = arguments[1];
|
||||
arguments[1] = .undefined;
|
||||
arguments[1] = .js_undefined;
|
||||
}
|
||||
|
||||
var formatOptions = ConsoleObject.FormatOptions{
|
||||
@@ -380,8 +380,8 @@ pub fn inspectTable(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame)
|
||||
|
||||
const writer = buffered_writer.writer();
|
||||
const Writer = @TypeOf(writer);
|
||||
const properties = if (arguments[1].jsType().isArray()) arguments[1] else JSValue.undefined;
|
||||
var table_printer = ConsoleObject.TablePrinter.init(
|
||||
const properties: JSValue = if (arguments[1].jsType().isArray()) arguments[1] else .js_undefined;
|
||||
var table_printer = try ConsoleObject.TablePrinter.init(
|
||||
globalThis,
|
||||
.Log,
|
||||
value,
|
||||
@@ -527,7 +527,7 @@ pub fn registerMacro(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFram
|
||||
arguments[1].protect();
|
||||
get_or_put_result.value_ptr.* = arguments[1].asObjectRef();
|
||||
|
||||
return .undefined;
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
pub fn getCWD(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
@@ -694,7 +694,7 @@ pub fn openInEditor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame)
|
||||
return globalThis.throw("Opening editor failed {s}", .{@errorName(err)});
|
||||
};
|
||||
|
||||
return .undefined;
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
pub fn getPublicPath(to: string, origin: URL, comptime Writer: type, writer: Writer) void {
|
||||
@@ -768,7 +768,7 @@ pub fn sleepSync(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b
|
||||
}
|
||||
|
||||
std.time.sleep(@as(u64, @intCast(milliseconds)) * std.time.ns_per_ms);
|
||||
return .undefined;
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
pub fn gc(vm: *JSC.VirtualMachine, sync: bool) usize {
|
||||
@@ -780,7 +780,7 @@ export fn Bun__gc(vm: *JSC.VirtualMachine, sync: bool) callconv(.C) usize {
|
||||
|
||||
pub fn shrink(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
globalObject.vm().shrinkFootprint();
|
||||
return .undefined;
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
fn doResolve(globalThis: *JSC.JSGlobalObject, arguments: []const JSValue) bun.JSError!JSC.JSValue {
|
||||
@@ -847,7 +847,7 @@ fn doResolveWithArgs(ctx: *JSC.JSGlobalObject, specifier: bun.String, from: bun.
|
||||
);
|
||||
|
||||
if (!errorable.success) {
|
||||
return ctx.throwValue(bun.cast(JSC.C.JSValueRef, errorable.result.err.ptr.?).?.value());
|
||||
return ctx.throwValue(errorable.result.err.value);
|
||||
}
|
||||
|
||||
if (query_string.len > 0) {
|
||||
@@ -905,7 +905,7 @@ export fn Bun__resolveSync(global: *JSGlobalObject, specifier: JSValue, source:
|
||||
const source_str = source.toBunString(global) catch return .zero;
|
||||
defer source_str.deref();
|
||||
|
||||
return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier_str, source_str, is_esm, true, is_user_require_resolve));
|
||||
return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier_str, source_str, is_esm, true, is_user_require_resolve });
|
||||
}
|
||||
|
||||
export fn Bun__resolveSyncWithPaths(
@@ -934,12 +934,12 @@ export fn Bun__resolveSyncWithPaths(
bun_vm.transpiler.resolver.custom_dir_paths = paths;
defer bun_vm.transpiler.resolver.custom_dir_paths = null;

- return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier_str, source_str, is_esm, true, is_user_require_resolve));
+ return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier_str, source_str, is_esm, true, is_user_require_resolve });
}

export fn Bun__resolveSyncWithStrings(global: *JSGlobalObject, specifier: *bun.String, source: *bun.String, is_esm: bool) JSC.JSValue {
Output.scoped(.importMetaResolve, false)("source: {s}, specifier: {s}", .{ source.*, specifier.* });
- return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier.*, source.*, is_esm, true, false));
+ return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier.*, source.*, is_esm, true, false });
}

export fn Bun__resolveSyncWithSource(global: *JSGlobalObject, specifier: JSValue, source: *bun.String, is_esm: bool, is_user_require_resolve: bool) JSC.JSValue {
@@ -948,7 +948,7 @@ export fn Bun__resolveSyncWithSource(global: *JSGlobalObject, specifier: JSValue
if (specifier_str.length() == 0) {
return global.ERR(.INVALID_ARG_VALUE, "The argument 'id' must be a non-empty string. Received ''", .{}).throw() catch .zero;
}
- return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier_str, source.*, is_esm, true, is_user_require_resolve));
+ return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier_str, source.*, is_esm, true, is_user_require_resolve });
}

pub fn indexOfLine(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
@@ -1043,22 +1043,22 @@ pub fn serve(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.J
@field(@TypeOf(entry.tag()), @typeName(JSC.API.HTTPServer)) => {
var server: *JSC.API.HTTPServer = entry.as(JSC.API.HTTPServer);
server.onReloadFromZig(&config, globalObject);
- return server.js_value.get() orelse .undefined;
+ return server.js_value.get() orelse .js_undefined;
},
@field(@TypeOf(entry.tag()), @typeName(JSC.API.DebugHTTPServer)) => {
var server: *JSC.API.DebugHTTPServer = entry.as(JSC.API.DebugHTTPServer);
server.onReloadFromZig(&config, globalObject);
- return server.js_value.get() orelse .undefined;
+ return server.js_value.get() orelse .js_undefined;
},
@field(@TypeOf(entry.tag()), @typeName(JSC.API.DebugHTTPSServer)) => {
var server: *JSC.API.DebugHTTPSServer = entry.as(JSC.API.DebugHTTPSServer);
server.onReloadFromZig(&config, globalObject);
- return server.js_value.get() orelse .undefined;
+ return server.js_value.get() orelse .js_undefined;
},
@field(@TypeOf(entry.tag()), @typeName(JSC.API.HTTPSServer)) => {
var server: *JSC.API.HTTPSServer = entry.as(JSC.API.HTTPSServer);
server.onReloadFromZig(&config, globalObject);
- return server.js_value.get() orelse .undefined;
+ return server.js_value.get() orelse .js_undefined;
},
else => {},
}
@@ -1288,7 +1288,7 @@ pub fn getS3DefaultClient(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC
}

pub fn getValkeyDefaultClient(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
- const valkey = JSC.API.Valkey.create(globalThis, &[_]JSValue{.undefined}) catch |err| {
+ const valkey = JSC.API.Valkey.create(globalThis, &.{.js_undefined}) catch |err| {
if (err != error.JSError) {
_ = globalThis.throwError(err, "Failed to create Redis client") catch {};
return .zero;
@@ -1303,9 +1303,9 @@ pub fn getValkeyClientConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObj
return JSC.API.Valkey.js.getConstructor(globalThis);
}

- pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
+ pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) bun.JSError!JSC.JSValue {
const vm = globalThis.bunVM();
- const graph = vm.standalone_module_graph orelse return JSC.JSValue.createEmptyArray(globalThis, 0);
+ const graph = vm.standalone_module_graph orelse return try JSC.JSValue.createEmptyArray(globalThis, 0);

const unsorted_files = graph.files.values();
var sort_indices = std.ArrayList(u32).initCapacity(bun.default_allocator, unsorted_files.len) catch bun.outOfMemory();
@@ -1320,7 +1320,7 @@ pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.J
}

var i: u32 = 0;
- var array = JSC.JSValue.createEmptyArray(globalThis, sort_indices.items.len);
+ var array = try JSC.JSValue.createEmptyArray(globalThis, sort_indices.items.len);
std.mem.sort(u32, sort_indices.items, unsorted_files, bun.StandaloneModuleGraph.File.lessThanByIndex);
for (sort_indices.items) |index| {
const file = &unsorted_files[index];
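// Editorial sketch, not part of the diff: getEmbeddedFiles sorts an index
// array with std.mem.sort instead of moving the files themselves. The same
// pattern in isolation, with a hypothetical File struct standing in for
// bun.StandaloneModuleGraph.File:
const std = @import("std");

const File = struct { name: []const u8 };

fn lessThanByIndex(files: []const File, a: u32, b: u32) bool {
    return std.mem.lessThan(u8, files[a].name, files[b].name);
}

test "sort indices without moving the files" {
    const files = [_]File{ .{ .name = "b" }, .{ .name = "a" }, .{ .name = "c" } };
    const files_slice: []const File = &files;
    var indices = [_]u32{ 0, 1, 2 };
    std.mem.sort(u32, &indices, files_slice, lessThanByIndex);
    try std.testing.expectEqualSlices(u32, &.{ 1, 0, 2 }, &indices);
}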
@@ -1459,7 +1459,7 @@ pub const JSZlib = struct {
// This has to be `inline` due to the callframe.
inline fn getOptions(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue } {
const arguments = callframe.arguments_old(2).slice();
- const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
+ const buffer_value: JSValue = if (arguments.len > 0) arguments[0] else .js_undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]
@@ -1731,7 +1731,7 @@ pub const JSZstd = struct {

inline fn getOptions(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue } {
const arguments = callframe.arguments();
- const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
+ const buffer_value: JSValue = if (arguments.len > 0) arguments[0] else .js_undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]
@@ -1765,7 +1765,7 @@ pub const JSZstd = struct {

inline fn getOptionsAsync(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!struct { JSC.Node.StringOrBuffer, ?JSValue, i32 } {
const arguments = callframe.arguments();
- const buffer_value = if (arguments.len > 0) arguments[0] else .undefined;
+ const buffer_value: JSValue = if (arguments.len > 0) arguments[0] else .js_undefined;
const options_val: ?JSValue =
if (arguments.len > 1 and arguments[1].isObject())
arguments[1]

@@ -413,7 +413,7 @@ const ValueOrError = union(enum) {
};

pub fn getPtrSlice(globalThis: *JSGlobalObject, value: JSValue, byteOffset: ?JSValue, byteLength: ?JSValue) ValueOrError {
- if (!value.isNumber()) {
+ if (!value.isNumber() or value.asNumber() < 0 or value.asNumber() > std.math.maxInt(usize)) {
return .{ .err = globalThis.toInvalidArguments("ptr must be a number.", .{}) };
}

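// Editorial sketch, not part of the diff: the widened guard above rejects
// negative or out-of-range doubles before they are turned into a pointer-sized
// integer. A standalone version of that check, assuming the input is the f64
// that `value.asNumber()` would return:
const std = @import("std");

fn fitsInUsize(num: f64) bool {
    return !(num < 0 or num > std.math.maxInt(usize));
}

test "pointer numbers must be non-negative and fit in usize" {
    try std.testing.expect(fitsInUsize(4096));
    try std.testing.expect(!fitsInUsize(-1));
    try std.testing.expect(!fitsInUsize(1e20));
}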
@@ -27,6 +27,7 @@ pub const xxHash3 = hashWrap(struct {
pub const murmur32v2 = hashWrap(std.hash.murmur.Murmur2_32);
pub const murmur32v3 = hashWrap(std.hash.murmur.Murmur3_32);
pub const murmur64v2 = hashWrap(std.hash.murmur.Murmur2_64);
+ pub const rapidhash = hashWrap(std.hash.RapidHash);

pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue {
const function = JSC.createCallback(globalThis, ZigString.static("hash"), 1, wyhash);
@@ -42,6 +43,7 @@ pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue {
"murmur32v2",
"murmur32v3",
"murmur64v2",
+ "rapidhash",
};
inline for (fns) |name| {
const value = JSC.createCallback(
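// Editorial sketch, not part of the diff: the new `rapidhash` export wraps
// std.hash.RapidHash the same way the murmur variants are wrapped. Assuming it
// exposes the same `hash(seed, bytes)` helper as std.hash.Wyhash, standalone
// usage looks like this:
const std = @import("std");

test "rapidhash digests are deterministic" {
    const a = std.hash.RapidHash.hash(0, "hello");
    const b = std.hash.RapidHash.hash(0, "hello");
    try std.testing.expectEqual(a, b);
    try std.testing.expect(a != std.hash.RapidHash.hash(0, "world"));
}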
@@ -86,11 +86,11 @@ pub const JSBundler = struct {
|
||||
|
||||
// Plugins must be resolved first as they are allowed to mutate the config JSValue
|
||||
if (try config.getArray(globalThis, "plugins")) |array| {
|
||||
const length = array.getLength(globalThis);
|
||||
var iter = array.arrayIterator(globalThis);
|
||||
var onstart_promise_array: JSValue = JSValue.undefined;
|
||||
const length = try array.getLength(globalThis);
|
||||
var iter = try array.arrayIterator(globalThis);
|
||||
var onstart_promise_array: JSValue = .js_undefined;
|
||||
var i: usize = 0;
|
||||
while (iter.next()) |plugin| : (i += 1) {
|
||||
while (try iter.next()) |plugin| : (i += 1) {
|
||||
if (!plugin.isObject()) {
|
||||
return globalThis.throwInvalidArguments("Expected plugin to be an object", .{});
|
||||
}
|
||||
@@ -201,7 +201,7 @@ pub const JSBundler = struct {
|
||||
}
|
||||
|
||||
if (try config.get(globalThis, "env")) |env| {
|
||||
if (env != .undefined) {
|
||||
if (!env.isUndefined()) {
|
||||
if (env == .null or env == .false or (env.isNumber() and env.asNumber() == 0)) {
|
||||
this.env_behavior = .disable;
|
||||
} else if (env == .true or (env.isNumber() and env.asNumber() == 1)) {
|
||||
@@ -267,8 +267,8 @@ pub const JSBundler = struct {
|
||||
}
|
||||
|
||||
if (try config.getArray(globalThis, "entrypoints") orelse try config.getArray(globalThis, "entryPoints")) |entry_points| {
|
||||
var iter = entry_points.arrayIterator(globalThis);
|
||||
while (iter.next()) |entry_point| {
|
||||
var iter = try entry_points.arrayIterator(globalThis);
|
||||
while (try iter.next()) |entry_point| {
|
||||
var slice = try entry_point.toSliceOrNull(globalThis);
|
||||
defer slice.deinit();
|
||||
try this.entry_points.insert(slice.slice());
|
||||
@@ -291,8 +291,8 @@ pub const JSBundler = struct {
|
||||
defer slice.deinit();
|
||||
try this.conditions.insert(slice.slice());
|
||||
} else if (conditions_value.jsType().isArray()) {
|
||||
var iter = conditions_value.arrayIterator(globalThis);
|
||||
while (iter.next()) |entry_point| {
|
||||
var iter = try conditions_value.arrayIterator(globalThis);
|
||||
while (try iter.next()) |entry_point| {
|
||||
var slice = try entry_point.toSliceOrNull(globalThis);
|
||||
defer slice.deinit();
|
||||
try this.conditions.insert(slice.slice());
|
||||
@@ -332,8 +332,8 @@ pub const JSBundler = struct {
|
||||
}
|
||||
|
||||
if (try config.getOwnArray(globalThis, "external")) |externals| {
|
||||
var iter = externals.arrayIterator(globalThis);
|
||||
while (iter.next()) |entry_point| {
|
||||
var iter = try externals.arrayIterator(globalThis);
|
||||
while (try iter.next()) |entry_point| {
|
||||
var slice = try entry_point.toSliceOrNull(globalThis);
|
||||
defer slice.deinit();
|
||||
try this.external.insert(slice.slice());
|
||||
@@ -341,8 +341,8 @@ pub const JSBundler = struct {
|
||||
}
|
||||
|
||||
if (try config.getOwnArray(globalThis, "drop")) |drops| {
|
||||
var iter = drops.arrayIterator(globalThis);
|
||||
while (iter.next()) |entry| {
|
||||
var iter = try drops.arrayIterator(globalThis);
|
||||
while (try iter.next()) |entry| {
|
||||
var slice = try entry.toSliceOrNull(globalThis);
|
||||
defer slice.deinit();
|
||||
try this.drop.insert(slice.slice());
|
||||
@@ -782,7 +782,7 @@ pub const JSBundler = struct {
|
||||
}
|
||||
|
||||
export fn JSBundlerPlugin__onDefer(load: *Load, global: *JSC.JSGlobalObject) JSValue {
|
||||
return JSC.toJSHostValue(global, load.onDefer(global));
|
||||
return JSC.toJSHostCall(global, @src(), Load.onDefer, .{ load, global });
|
||||
}
|
||||
fn onDefer(this: *Load, globalObject: *JSC.JSGlobalObject) bun.JSError!JSValue {
|
||||
if (this.called_defer) {
|
||||
|
||||
@@ -6,7 +6,6 @@ const JSC = bun.JSC;
|
||||
const Transpiler = bun.transpiler;
|
||||
const options = @import("../../options.zig");
|
||||
const ZigString = JSC.ZigString;
|
||||
const JSObject = JSC.JSObject;
|
||||
const JSValue = bun.JSC.JSValue;
|
||||
const JSGlobalObject = JSC.JSGlobalObject;
|
||||
const strings = bun.strings;
|
||||
@@ -354,13 +353,13 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
single_external[0] = std.fmt.allocPrint(allocator, "{}", .{external}) catch unreachable;
|
||||
transpiler.transform.external = single_external;
|
||||
} else if (toplevel_type.isArray()) {
|
||||
const count = external.getLength(globalThis);
|
||||
const count = try external.getLength(globalThis);
|
||||
if (count == 0) break :external;
|
||||
|
||||
var externals = allocator.alloc(string, count) catch unreachable;
|
||||
var iter = external.arrayIterator(globalThis);
|
||||
var iter = try external.arrayIterator(globalThis);
|
||||
var i: usize = 0;
|
||||
while (iter.next()) |entry| {
|
||||
while (try iter.next()) |entry| {
|
||||
if (!entry.jsType().isStringLike()) {
|
||||
return globalObject.throwInvalidArguments("external must be a string or string[]", .{});
|
||||
}
|
||||
@@ -419,7 +418,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
if (TSConfigJSON.parse(
|
||||
allocator,
|
||||
&transpiler.log,
|
||||
logger.Source.initPathString("tsconfig.json", transpiler.tsconfig_buf),
|
||||
&logger.Source.initPathString("tsconfig.json", transpiler.tsconfig_buf),
|
||||
&JSC.VirtualMachine.get().transpiler.resolver.caches.json,
|
||||
) catch null) |parsed_tsconfig| {
|
||||
transpiler.tsconfig = parsed_tsconfig;
|
||||
@@ -453,7 +452,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
|
||||
if (out.isEmpty()) break :macros;
|
||||
transpiler.macros_buf = out.toOwnedSlice(allocator) catch bun.outOfMemory();
|
||||
const source = logger.Source.initPathString("macros.json", transpiler.macros_buf);
|
||||
const source = &logger.Source.initPathString("macros.json", transpiler.macros_buf);
|
||||
const json = (JSC.VirtualMachine.get().transpiler.resolver.caches.json.parseJSON(
|
||||
&transpiler.log,
|
||||
source,
|
||||
@@ -461,7 +460,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
.json,
|
||||
false,
|
||||
) catch null) orelse break :macros;
|
||||
transpiler.macro_map = PackageJSON.parseMacrosJSON(allocator, json, &transpiler.log, &source);
|
||||
transpiler.macro_map = PackageJSON.parseMacrosJSON(allocator, json, &transpiler.log, source);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -550,13 +549,13 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
|
||||
var total_name_buf_len: u32 = 0;
|
||||
var string_count: u32 = 0;
|
||||
const iter = JSC.JSArrayIterator.init(eliminate, globalThis);
|
||||
const iter = try JSC.JSArrayIterator.init(eliminate, globalThis);
|
||||
{
|
||||
var length_iter = iter;
|
||||
while (length_iter.next()) |value| {
|
||||
while (try length_iter.next()) |value| {
|
||||
if (value.isString()) {
|
||||
const length = @as(u32, @truncate(value.getLength(globalThis)));
|
||||
string_count += @as(u32, @intFromBool(length > 0));
|
||||
const length: u32 = @truncate(try value.getLength(globalThis));
|
||||
string_count += @intFromBool(length > 0);
|
||||
total_name_buf_len += length;
|
||||
}
|
||||
}
|
||||
@@ -567,7 +566,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
try replacements.ensureUnusedCapacity(bun.default_allocator, string_count);
|
||||
{
|
||||
var length_iter = iter;
|
||||
while (length_iter.next()) |value| {
|
||||
while (try length_iter.next()) |value| {
|
||||
if (!value.isString()) continue;
|
||||
const str = try value.getZigString(globalThis);
|
||||
if (str.len == 0) continue;
|
||||
@@ -624,10 +623,10 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st
|
||||
continue;
|
||||
}
|
||||
|
||||
if (value.isObject() and value.getLength(globalObject) == 2) {
|
||||
const replacementValue = JSC.JSObject.getIndex(value, globalThis, 1);
|
||||
if (value.isObject() and try value.getLength(globalObject) == 2) {
|
||||
const replacementValue = try value.getIndex(globalThis, 1);
|
||||
if (try exportReplacementValue(replacementValue, globalThis)) |to_replace| {
|
||||
const replacementKey = JSC.JSObject.getIndex(value, globalThis, 0);
|
||||
const replacementKey = try value.getIndex(globalThis, 0);
|
||||
var slice = (try (try replacementKey.toSlice(globalThis, bun.default_allocator)).cloneIfNeeded(bun.default_allocator));
|
||||
const replacement_name = slice.slice();
|
||||
|
||||
@@ -769,7 +768,7 @@ pub fn finalize(this: *JSTranspiler) void {
|
||||
|
||||
fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []const u8, loader: ?Loader, macro_js_ctx: Transpiler.MacroJSValueType) ?Transpiler.ParseResult {
|
||||
const name = this.transpiler_options.default_loader.stdinName();
|
||||
const source = logger.Source.initPathString(name, code);
|
||||
const source = &logger.Source.initPathString(name, code);
|
||||
|
||||
const jsx = if (this.transpiler_options.tsconfig != null)
|
||||
this.transpiler_options.tsconfig.?.mergeJSX(this.transpiler.options.jsx)
|
||||
@@ -784,7 +783,7 @@ fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []con
|
||||
.loader = loader orelse this.transpiler_options.default_loader,
|
||||
.jsx = jsx,
|
||||
.path = source.path,
|
||||
.virtual_source = &source,
|
||||
.virtual_source = source,
|
||||
.replace_exports = this.transpiler_options.runtime.replace_exports,
|
||||
.macro_js_ctx = macro_js_ctx,
|
||||
// .allocator = this.
|
||||
@@ -852,12 +851,12 @@ pub fn scan(this: *JSTranspiler, globalThis: *JSC.JSGlobalObject, callframe: *JS
|
||||
|
||||
const exports_label = JSC.ZigString.static("exports");
|
||||
const imports_label = JSC.ZigString.static("imports");
|
||||
const named_imports_value = namedImportsToJS(
|
||||
const named_imports_value = try namedImportsToJS(
|
||||
globalThis,
|
||||
parse_result.ast.import_records.slice(),
|
||||
);
|
||||
|
||||
const named_exports_value = namedExportsToJS(
|
||||
const named_exports_value = try namedExportsToJS(
|
||||
globalThis,
|
||||
&parse_result.ast.named_exports,
|
||||
);
|
||||
@@ -1027,7 +1026,7 @@ pub fn transformSync(
|
||||
return out.toJS(globalThis);
|
||||
}
|
||||
|
||||
fn namedExportsToJS(global: *JSGlobalObject, named_exports: *JSAst.Ast.NamedExports) JSC.JSValue {
|
||||
fn namedExportsToJS(global: *JSGlobalObject, named_exports: *JSAst.Ast.NamedExports) bun.JSError!JSC.JSValue {
|
||||
if (named_exports.count() == 0)
|
||||
return JSValue.createEmptyArray(global, 0);
|
||||
|
||||
@@ -1052,14 +1051,11 @@ fn namedExportsToJS(global: *JSGlobalObject, named_exports: *JSAst.Ast.NamedExpo
|
||||
|
||||
const ImportRecord = @import("../../import_record.zig").ImportRecord;
|
||||
|
||||
fn namedImportsToJS(
|
||||
global: *JSGlobalObject,
|
||||
import_records: []const ImportRecord,
|
||||
) JSC.JSValue {
|
||||
fn namedImportsToJS(global: *JSGlobalObject, import_records: []const ImportRecord) bun.JSError!JSC.JSValue {
|
||||
const path_label = JSC.ZigString.static("path");
|
||||
const kind_label = JSC.ZigString.static("kind");
|
||||
|
||||
const array = JSC.JSValue.createEmptyArray(global, import_records.len);
|
||||
const array = try JSC.JSValue.createEmptyArray(global, import_records.len);
|
||||
array.ensureStillAlive();
|
||||
|
||||
for (import_records, 0..) |record, i| {
|
||||
@@ -1157,7 +1153,7 @@ pub fn scanImports(this: *JSTranspiler, globalThis: *JSC.JSGlobalObject, callfra
|
||||
return globalThis.throwValue(try log.toJS(globalThis, globalThis.allocator(), "Failed to scan imports"));
|
||||
}
|
||||
|
||||
const named_imports_value = namedImportsToJS(
|
||||
const named_imports_value = try namedImportsToJS(
|
||||
globalThis,
|
||||
this.scan_pass_result.import_records.items,
|
||||
);
|
||||
|
||||
@@ -29,8 +29,8 @@ pub fn parse(
|
||||
|
||||
var input_slice = try arguments[0].toSlice(globalThis, bun.default_allocator);
|
||||
defer input_slice.deinit();
|
||||
var source = logger.Source.initPathString("input.toml", input_slice.slice());
|
||||
const parse_result = TOMLParser.parse(&source, &log, allocator, false) catch {
|
||||
const source = &logger.Source.initPathString("input.toml", input_slice.slice());
|
||||
const parse_result = TOMLParser.parse(source, &log, allocator, false) catch {
|
||||
return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to parse toml"));
|
||||
};
|
||||
|
||||
@@ -41,7 +41,7 @@ pub fn parse(
|
||||
*js_printer.BufferPrinter,
|
||||
&writer,
|
||||
parse_result,
|
||||
&source,
|
||||
source,
|
||||
.{
|
||||
.mangled_props = null,
|
||||
},
|
||||
|
||||
@@ -5,13 +5,9 @@ const VirtualMachine = JSC.VirtualMachine;
|
||||
const JSValue = JSC.JSValue;
|
||||
const JSError = bun.JSError;
|
||||
const JSGlobalObject = JSC.JSGlobalObject;
|
||||
const Debugger = JSC.Debugger;
|
||||
const Environment = bun.Environment;
|
||||
const uv = bun.windows.libuv;
|
||||
const api = bun.api;
|
||||
const StatWatcherScheduler = @import("../node/node_fs_stat_watcher.zig").StatWatcherScheduler;
|
||||
const Timer = @This();
|
||||
const DNSResolver = @import("./bun/dns_resolver.zig").DNSResolver;
|
||||
|
||||
/// TimeoutMap is map of i32 to nullable Timeout structs
|
||||
/// i32 is exposed to JavaScript and can be used with clearTimeout, clearInterval, etc.
|
||||
@@ -330,8 +326,8 @@ pub const All = struct {
|
||||
globalThis.emitWarning(
|
||||
warning_string.transferToJS(globalThis),
|
||||
warning_type_string.transferToJS(globalThis),
|
||||
.undefined,
|
||||
.undefined,
|
||||
.js_undefined,
|
||||
.js_undefined,
|
||||
) catch unreachable;
|
||||
}
|
||||
|
||||
@@ -390,7 +386,7 @@ pub const All = struct {
|
||||
|
||||
const countdown_int = try vm.timer.jsValueToCountdown(global, countdown, .clamp, true);
|
||||
const wrapped_promise = promise.withAsyncContextIfNeeded(global);
|
||||
return TimeoutObject.init(global, id, .setTimeout, countdown_int, wrapped_promise, .undefined);
|
||||
return TimeoutObject.init(global, id, .setTimeout, countdown_int, wrapped_promise, .js_undefined);
|
||||
}
|
||||
|
||||
pub fn setImmediate(
|
||||
@@ -517,7 +513,7 @@ pub const All = struct {
|
||||
) JSError!JSValue {
|
||||
JSC.markBinding(@src());
|
||||
try clearTimer(id, globalThis, .setImmediate);
|
||||
return JSValue.jsUndefined();
|
||||
return .js_undefined;
|
||||
}
|
||||
pub fn clearTimeout(
|
||||
globalThis: *JSGlobalObject,
|
||||
@@ -525,7 +521,7 @@ pub const All = struct {
|
||||
) JSError!JSValue {
|
||||
JSC.markBinding(@src());
|
||||
try clearTimer(id, globalThis, .setTimeout);
|
||||
return JSValue.jsUndefined();
|
||||
return .js_undefined;
|
||||
}
|
||||
pub fn clearInterval(
|
||||
globalThis: *JSGlobalObject,
|
||||
@@ -533,7 +529,7 @@ pub const All = struct {
|
||||
) JSError!JSValue {
|
||||
JSC.markBinding(@src());
|
||||
try clearTimer(id, globalThis, .setInterval);
|
||||
return JSValue.jsUndefined();
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
comptime {
|
||||
@@ -548,699 +544,11 @@ pub const All = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const uws = bun.uws;
|
||||
pub const EventLoopTimer = @import("./Timer/EventLoopTimer.zig");
|
||||
|
||||
pub const TimeoutObject = struct {
|
||||
const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
|
||||
pub const ref = RefCount.ref;
|
||||
pub const deref = RefCount.deref;
|
||||
|
||||
pub const js = JSC.Codegen.JSTimeout;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
ref_count: RefCount,
|
||||
event_loop_timer: EventLoopTimer = .{
|
||||
.next = .{},
|
||||
.tag = .TimeoutObject,
|
||||
},
|
||||
internals: TimerObjectInternals,
|
||||
|
||||
pub fn init(
|
||||
globalThis: *JSGlobalObject,
|
||||
id: i32,
|
||||
kind: Kind,
|
||||
interval: u31,
|
||||
callback: JSValue,
|
||||
arguments: JSValue,
|
||||
) JSValue {
|
||||
// internals are initialized by init()
|
||||
const timeout = bun.new(TimeoutObject, .{ .ref_count = .init(), .internals = undefined });
|
||||
const js_value = timeout.toJS(globalThis);
|
||||
defer js_value.ensureStillAlive();
|
||||
timeout.internals.init(
|
||||
js_value,
|
||||
globalThis,
|
||||
id,
|
||||
kind,
|
||||
interval,
|
||||
callback,
|
||||
arguments,
|
||||
);
|
||||
|
||||
if (globalThis.bunVM().isInspectorEnabled()) {
|
||||
Debugger.didScheduleAsyncCall(
|
||||
globalThis,
|
||||
.DOMTimer,
|
||||
ID.asyncID(.{ .id = id, .kind = kind.big() }),
|
||||
kind != .setInterval,
|
||||
);
|
||||
}
|
||||
|
||||
return js_value;
|
||||
}
|
||||
|
||||
fn deinit(this: *TimeoutObject) void {
|
||||
this.internals.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
|
||||
pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*TimeoutObject {
|
||||
_ = callFrame;
|
||||
return globalObject.throw("Timeout is not constructible", .{});
|
||||
}
|
||||
|
||||
pub fn toPrimitive(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.toPrimitive();
|
||||
}
|
||||
|
||||
pub fn doRef(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.doRef(globalThis, callFrame.this());
|
||||
}
|
||||
|
||||
pub fn doUnref(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.doUnref(globalThis, callFrame.this());
|
||||
}
|
||||
|
||||
pub fn doRefresh(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.doRefresh(globalThis, callFrame.this());
|
||||
}
|
||||
|
||||
pub fn hasRef(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.hasRef();
|
||||
}
|
||||
|
||||
pub fn finalize(this: *TimeoutObject) void {
|
||||
this.internals.finalize();
|
||||
}
|
||||
|
||||
pub fn getDestroyed(this: *TimeoutObject, globalThis: *JSGlobalObject) JSValue {
|
||||
_ = globalThis;
|
||||
return .jsBoolean(this.internals.getDestroyed());
|
||||
}
|
||||
|
||||
pub fn close(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) JSValue {
|
||||
this.internals.cancel(globalThis.bunVM());
|
||||
return callFrame.this();
|
||||
}
|
||||
|
||||
pub fn get_onTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
|
||||
return TimeoutObject.js.callbackGetCached(thisValue).?;
|
||||
}
|
||||
|
||||
pub fn set_onTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
|
||||
TimeoutObject.js.callbackSetCached(thisValue, globalThis, value);
|
||||
}
|
||||
|
||||
pub fn get_idleTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
|
||||
return TimeoutObject.js.idleTimeoutGetCached(thisValue).?;
|
||||
}
|
||||
|
||||
pub fn set_idleTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
|
||||
TimeoutObject.js.idleTimeoutSetCached(thisValue, globalThis, value);
|
||||
}
|
||||
|
||||
pub fn get_repeat(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
|
||||
return TimeoutObject.js.repeatGetCached(thisValue).?;
|
||||
}
|
||||
|
||||
pub fn set_repeat(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
|
||||
TimeoutObject.js.repeatSetCached(thisValue, globalThis, value);
|
||||
}
|
||||
|
||||
pub fn dispose(this: *TimeoutObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
this.internals.cancel(globalThis.bunVM());
|
||||
return .undefined;
|
||||
}
|
||||
};
|
||||
|
||||
pub const ImmediateObject = struct {
|
||||
const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
|
||||
pub const ref = RefCount.ref;
|
||||
pub const deref = RefCount.deref;
|
||||
|
||||
pub const js = JSC.Codegen.JSImmediate;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
ref_count: RefCount,
|
||||
event_loop_timer: EventLoopTimer = .{
|
||||
.next = .{},
|
||||
.tag = .ImmediateObject,
|
||||
},
|
||||
internals: TimerObjectInternals,
|
||||
|
||||
pub fn init(
|
||||
globalThis: *JSGlobalObject,
|
||||
id: i32,
|
||||
callback: JSValue,
|
||||
arguments: JSValue,
|
||||
) JSValue {
|
||||
// internals are initialized by init()
|
||||
const immediate = bun.new(ImmediateObject, .{ .ref_count = .init(), .internals = undefined });
|
||||
const js_value = immediate.toJS(globalThis);
|
||||
defer js_value.ensureStillAlive();
|
||||
immediate.internals.init(
|
||||
js_value,
|
||||
globalThis,
|
||||
id,
|
||||
.setImmediate,
|
||||
0,
|
||||
callback,
|
||||
arguments,
|
||||
);
|
||||
|
||||
if (globalThis.bunVM().isInspectorEnabled()) {
|
||||
Debugger.didScheduleAsyncCall(
|
||||
globalThis,
|
||||
.DOMTimer,
|
||||
ID.asyncID(.{ .id = id, .kind = .setImmediate }),
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
return js_value;
|
||||
}
|
||||
|
||||
fn deinit(this: *ImmediateObject) void {
|
||||
this.internals.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
|
||||
pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*ImmediateObject {
|
||||
_ = callFrame;
|
||||
return globalObject.throw("Immediate is not constructible", .{});
|
||||
}
|
||||
|
||||
/// returns true if an exception was thrown
|
||||
pub fn runImmediateTask(this: *ImmediateObject, vm: *VirtualMachine) bool {
|
||||
return this.internals.runImmediateTask(vm);
|
||||
}
|
||||
|
||||
pub fn toPrimitive(this: *ImmediateObject, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.toPrimitive();
|
||||
}
|
||||
|
||||
pub fn doRef(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.doRef(globalThis, callFrame.this());
|
||||
}
|
||||
|
||||
pub fn doUnref(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.doUnref(globalThis, callFrame.this());
|
||||
}
|
||||
|
||||
pub fn hasRef(this: *ImmediateObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return this.internals.hasRef();
|
||||
}
|
||||
|
||||
pub fn finalize(this: *ImmediateObject) void {
|
||||
this.internals.finalize();
|
||||
}
|
||||
|
||||
pub fn getDestroyed(this: *ImmediateObject, globalThis: *JSGlobalObject) JSValue {
|
||||
_ = globalThis;
|
||||
return .jsBoolean(this.internals.getDestroyed());
|
||||
}
|
||||
|
||||
pub fn dispose(this: *ImmediateObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
this.internals.cancel(globalThis.bunVM());
|
||||
return .undefined;
|
||||
}
|
||||
};
|
||||
|
||||
/// Data that TimerObject and ImmediateObject have in common
|
||||
pub const TimerObjectInternals = struct {
|
||||
/// Identifier for this timer that is exposed to JavaScript (by `+timer`)
|
||||
id: i32 = -1,
|
||||
interval: u31 = 0,
|
||||
strong_this: JSC.Strong.Optional = .empty,
|
||||
flags: Flags = .{},
|
||||
|
||||
const Flags = packed struct(u32) {
|
||||
/// Whenever a timer is inserted into the heap (which happen on creation or refresh), the global
|
||||
/// epoch is incremented and the new epoch is set on the timer. For timers created by
|
||||
/// JavaScript, the epoch is used to break ties between timers scheduled for the same
|
||||
/// millisecond. This ensures that if you set two timers for the same amount of time, and
|
||||
/// refresh the first one, the first one will fire last. This mimics Node.js's behavior where
|
||||
/// the refreshed timer will be inserted at the end of a list, which makes it fire later.
|
||||
epoch: u25 = 0,
|
||||
|
||||
kind: Kind = .setTimeout,
|
||||
|
||||
// we do not allow the timer to be refreshed after we call clearInterval/clearTimeout
|
||||
has_cleared_timer: bool = false,
|
||||
is_keeping_event_loop_alive: bool = false,
|
||||
|
||||
// if they never access the timer by integer, don't create a hashmap entry.
|
||||
has_accessed_primitive: bool = false,
|
||||
|
||||
has_js_ref: bool = true,
|
||||
|
||||
/// Set to `true` only during execution of the JavaScript function so that `_destroyed` can be
|
||||
/// false during the callback, even though the `state` will be `FIRED`.
|
||||
in_callback: bool = false,
|
||||
};
|
||||
|
||||
fn eventLoopTimer(this: *TimerObjectInternals) *EventLoopTimer {
|
||||
switch (this.flags.kind) {
|
||||
.setImmediate => {
|
||||
const parent: *ImmediateObject = @fieldParentPtr("internals", this);
|
||||
assert(parent.event_loop_timer.tag == .ImmediateObject);
|
||||
return &parent.event_loop_timer;
|
||||
},
|
||||
.setTimeout, .setInterval => {
|
||||
const parent: *TimeoutObject = @fieldParentPtr("internals", this);
|
||||
assert(parent.event_loop_timer.tag == .TimeoutObject);
|
||||
return &parent.event_loop_timer;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn ref(this: *TimerObjectInternals) void {
|
||||
switch (this.flags.kind) {
|
||||
.setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).ref(),
|
||||
.setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).ref(),
|
||||
}
|
||||
}
|
||||
|
||||
fn deref(this: *TimerObjectInternals) void {
|
||||
switch (this.flags.kind) {
|
||||
.setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).deref(),
|
||||
.setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).deref(),
|
||||
}
|
||||
}
|
||||
|
||||
extern "c" fn Bun__JSTimeout__call(globalObject: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue) bool;
|
||||
|
||||
/// returns true if an exception was thrown
|
||||
pub fn runImmediateTask(this: *TimerObjectInternals, vm: *VirtualMachine) bool {
|
||||
if (this.flags.has_cleared_timer or
|
||||
// unref'd setImmediate callbacks should only run if there are things keeping the event
|
||||
// loop alive other than setImmediates
|
||||
(!this.flags.is_keeping_event_loop_alive and !vm.isEventLoopAliveExcludingImmediates()))
|
||||
{
|
||||
this.deref();
|
||||
return false;
|
||||
}
|
||||
|
||||
const timer = this.strong_this.get() orelse {
|
||||
if (Environment.isDebug) {
|
||||
@panic("TimerObjectInternals.runImmediateTask: this_object is null");
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const globalThis = vm.global;
|
||||
this.strong_this.deinit();
|
||||
this.eventLoopTimer().state = .FIRED;
|
||||
this.setEnableKeepingEventLoopAlive(vm, false);
|
||||
|
||||
vm.eventLoop().enter();
|
||||
const callback = ImmediateObject.js.callbackGetCached(timer).?;
|
||||
const arguments = ImmediateObject.js.argumentsGetCached(timer).?;
|
||||
this.ref();
|
||||
const exception_thrown = this.run(globalThis, timer, callback, arguments, this.asyncID(), vm);
|
||||
this.deref();
|
||||
|
||||
if (this.eventLoopTimer().state == .FIRED) {
|
||||
this.deref();
|
||||
}
|
||||
|
||||
vm.eventLoop().exitMaybeDrainMicrotasks(!exception_thrown);
|
||||
|
||||
return exception_thrown;
|
||||
}
|
||||
|
||||
pub fn asyncID(this: *const TimerObjectInternals) u64 {
|
||||
return ID.asyncID(.{ .id = this.id, .kind = this.flags.kind.big() });
|
||||
}
|
||||
|
||||
pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *JSC.VirtualMachine) EventLoopTimer.Arm {
|
||||
const id = this.id;
|
||||
const kind = this.flags.kind.big();
|
||||
const async_id: ID = .{ .id = id, .kind = kind };
|
||||
const has_been_cleared = this.eventLoopTimer().state == .CANCELLED or this.flags.has_cleared_timer or vm.scriptExecutionStatus() != .running;
|
||||
|
||||
this.eventLoopTimer().state = .FIRED;
|
||||
|
||||
const globalThis = vm.global;
|
||||
const this_object = this.strong_this.get().?;
|
||||
|
||||
const callback, const arguments, var idle_timeout, var repeat = switch (kind) {
|
||||
.setImmediate => .{
|
||||
ImmediateObject.js.callbackGetCached(this_object).?,
|
||||
ImmediateObject.js.argumentsGetCached(this_object).?,
|
||||
|
||||
.undefined,
|
||||
.undefined,
|
||||
},
|
||||
.setTimeout, .setInterval => .{
|
||||
TimeoutObject.js.callbackGetCached(this_object).?,
|
||||
TimeoutObject.js.argumentsGetCached(this_object).?,
|
||||
TimeoutObject.js.idleTimeoutGetCached(this_object).?,
|
||||
TimeoutObject.js.repeatGetCached(this_object).?,
|
||||
},
|
||||
};
|
||||
|
||||
if (has_been_cleared or !callback.toBoolean()) {
|
||||
if (vm.isInspectorEnabled()) {
|
||||
Debugger.didCancelAsyncCall(globalThis, .DOMTimer, ID.asyncID(async_id));
|
||||
}
|
||||
this.setEnableKeepingEventLoopAlive(vm, false);
|
||||
this.flags.has_cleared_timer = true;
|
||||
this.strong_this.deinit();
|
||||
this.deref();
|
||||
|
||||
return .disarm;
|
||||
}
|
||||
|
||||
var time_before_call: timespec = undefined;
|
||||
|
||||
if (kind != .setInterval) {
|
||||
this.strong_this.clearWithoutDeallocation();
|
||||
} else {
|
||||
time_before_call = timespec.msFromNow(this.interval);
|
||||
}
|
||||
this_object.ensureStillAlive();
|
||||
|
||||
vm.eventLoop().enter();
|
||||
{
|
||||
// Ensure it stays alive for this scope.
|
||||
this.ref();
|
||||
defer this.deref();
|
||||
|
||||
_ = this.run(globalThis, this_object, callback, arguments, ID.asyncID(async_id), vm);
|
||||
|
||||
switch (kind) {
|
||||
.setTimeout, .setInterval => {
|
||||
idle_timeout = TimeoutObject.js.idleTimeoutGetCached(this_object).?;
|
||||
repeat = TimeoutObject.js.repeatGetCached(this_object).?;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
const is_timer_done = is_timer_done: {
|
||||
// Node doesn't drain microtasks after each timer callback.
|
||||
if (kind == .setInterval) {
|
||||
if (!this.shouldRescheduleTimer(repeat, idle_timeout)) {
|
||||
break :is_timer_done true;
|
||||
}
|
||||
switch (this.eventLoopTimer().state) {
|
||||
.FIRED => {
|
||||
// If we didn't clear the setInterval, reschedule it starting from
|
||||
vm.timer.update(this.eventLoopTimer(), &time_before_call);
|
||||
|
||||
if (this.flags.has_js_ref) {
|
||||
this.setEnableKeepingEventLoopAlive(vm, true);
|
||||
}
|
||||
|
||||
// The ref count doesn't change. It wasn't decremented.
|
||||
},
|
||||
.ACTIVE => {
|
||||
// The developer called timer.refresh() synchronously in the callback.
|
||||
vm.timer.update(this.eventLoopTimer(), &time_before_call);
|
||||
|
||||
// Balance out the ref count.
|
||||
// the transition from "FIRED" -> "ACTIVE" caused it to increment.
|
||||
this.deref();
|
||||
},
|
||||
else => {
|
||||
break :is_timer_done true;
|
||||
},
|
||||
}
|
||||
} else {
|
||||
if (kind == .setTimeout and !repeat.isNull()) {
|
||||
if (idle_timeout.getNumber()) |num| {
|
||||
if (num != -1) {
|
||||
this.convertToInterval(globalThis, this_object, repeat);
|
||||
break :is_timer_done false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.eventLoopTimer().state == .FIRED) {
|
||||
break :is_timer_done true;
|
||||
}
|
||||
}
|
||||
|
||||
break :is_timer_done false;
|
||||
};
|
||||
|
||||
if (is_timer_done) {
|
||||
this.setEnableKeepingEventLoopAlive(vm, false);
|
||||
// The timer will not be re-entered into the event loop at this point.
|
||||
this.deref();
|
||||
}
|
||||
}
|
||||
vm.eventLoop().exit();
|
||||
|
||||
return .disarm;
|
||||
}
|
||||
|
||||
fn convertToInterval(this: *TimerObjectInternals, global: *JSGlobalObject, timer: JSValue, repeat: JSValue) void {
|
||||
bun.debugAssert(this.flags.kind == .setTimeout);
|
||||
|
||||
const vm = global.bunVM();
|
||||
|
||||
const new_interval: u31 = if (repeat.getNumber()) |num| if (num < 1 or num > std.math.maxInt(u31)) 1 else @intFromFloat(num) else 1;
|
||||
|
||||
// https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L613
|
||||
TimeoutObject.js.idleTimeoutSetCached(timer, global, repeat);
|
||||
this.strong_this.set(global, timer);
|
||||
this.flags.kind = .setInterval;
|
||||
this.interval = new_interval;
|
||||
this.reschedule(timer, vm);
|
||||
}
|
||||
|
||||
pub fn run(this: *TimerObjectInternals, globalThis: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue, async_id: u64, vm: *JSC.VirtualMachine) bool {
|
||||
if (vm.isInspectorEnabled()) {
|
||||
Debugger.willDispatchAsyncCall(globalThis, .DOMTimer, async_id);
|
||||
}
|
||||
|
||||
defer {
|
||||
if (vm.isInspectorEnabled()) {
|
||||
Debugger.didDispatchAsyncCall(globalThis, .DOMTimer, async_id);
|
||||
}
|
||||
}
|
||||
|
||||
// Bun__JSTimeout__call handles exceptions.
|
||||
this.flags.in_callback = true;
|
||||
defer this.flags.in_callback = false;
|
||||
return Bun__JSTimeout__call(globalThis, timer, callback, arguments);
|
||||
}
|
||||
|
||||
pub fn init(
|
||||
this: *TimerObjectInternals,
|
||||
timer: JSValue,
|
||||
global: *JSGlobalObject,
|
||||
id: i32,
|
||||
kind: Kind,
|
||||
interval: u31,
|
||||
callback: JSValue,
|
||||
arguments: JSValue,
|
||||
) void {
|
||||
const vm = global.bunVM();
|
||||
this.* = .{
|
||||
.id = id,
|
||||
.flags = .{ .kind = kind, .epoch = vm.timer.epoch },
|
||||
.interval = interval,
|
||||
};
|
||||
|
||||
if (kind == .setImmediate) {
|
||||
ImmediateObject.js.argumentsSetCached(timer, global, arguments);
|
||||
ImmediateObject.js.callbackSetCached(timer, global, callback);
|
||||
const parent: *ImmediateObject = @fieldParentPtr("internals", this);
|
||||
vm.enqueueImmediateTask(parent);
|
||||
this.setEnableKeepingEventLoopAlive(vm, true);
|
||||
// ref'd by event loop
|
||||
parent.ref();
|
||||
} else {
|
||||
TimeoutObject.js.argumentsSetCached(timer, global, arguments);
|
||||
TimeoutObject.js.callbackSetCached(timer, global, callback);
|
||||
TimeoutObject.js.idleTimeoutSetCached(timer, global, JSC.jsNumber(interval));
|
||||
TimeoutObject.js.repeatSetCached(timer, global, if (kind == .setInterval) JSC.jsNumber(interval) else .null);
|
||||
|
||||
// this increments the refcount
|
||||
this.reschedule(timer, vm);
|
||||
}
|
||||
|
||||
this.strong_this.set(global, timer);
|
||||
}
|
||||
|
||||
pub fn doRef(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue {
|
||||
this_value.ensureStillAlive();
|
||||
|
||||
const did_have_js_ref = this.flags.has_js_ref;
|
||||
this.flags.has_js_ref = true;
|
||||
|
||||
// https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L256
|
||||
// and
|
||||
// https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L685-L687
|
||||
if (!did_have_js_ref and !this.flags.has_cleared_timer) {
|
||||
this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), true);
|
||||
}
|
||||
|
||||
return this_value;
|
||||
}
|
||||
|
||||
pub fn doRefresh(this: *TimerObjectInternals, globalObject: *JSC.JSGlobalObject, this_value: JSValue) JSValue {
|
||||
// Immediates do not have a refresh function, and our binding generator should not let this
|
||||
// function be reached even if you override the `this` value calling a Timeout object's
|
||||
// `refresh` method
|
||||
assert(this.flags.kind != .setImmediate);
|
||||
|
||||
// setImmediate does not support refreshing and we do not support refreshing after cleanup
|
||||
if (this.id == -1 or this.flags.kind == .setImmediate or this.flags.has_cleared_timer) {
|
||||
return this_value;
|
||||
}
|
||||
|
||||
this.strong_this.set(globalObject, this_value);
|
||||
this.reschedule(this_value, VirtualMachine.get());
|
||||
|
||||
return this_value;
|
||||
}
|
||||
|
||||
pub fn doUnref(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue {
|
||||
this_value.ensureStillAlive();
|
||||
|
||||
const did_have_js_ref = this.flags.has_js_ref;
|
||||
this.flags.has_js_ref = false;
|
||||
|
||||
if (did_have_js_ref) {
|
||||
this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), false);
|
||||
}
|
||||
|
||||
return this_value;
|
||||
}
|
||||
|
||||
pub fn cancel(this: *TimerObjectInternals, vm: *VirtualMachine) void {
|
||||
this.setEnableKeepingEventLoopAlive(vm, false);
|
||||
this.flags.has_cleared_timer = true;
|
||||
|
||||
if (this.flags.kind == .setImmediate) return;
|
||||
|
||||
const was_active = this.eventLoopTimer().state == .ACTIVE;
|
||||
|
||||
this.eventLoopTimer().state = .CANCELLED;
|
||||
this.strong_this.deinit();
|
||||
|
||||
if (was_active) {
|
||||
vm.timer.remove(this.eventLoopTimer());
|
||||
this.deref();
|
||||
}
|
||||
}
|
||||
|
||||
fn shouldRescheduleTimer(this: *TimerObjectInternals, repeat: JSValue, idle_timeout: JSValue) bool {
|
||||
if (this.flags.kind == .setInterval and repeat.isNull()) return false;
|
||||
if (idle_timeout.getNumber()) |num| {
|
||||
if (num == -1) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn reschedule(this: *TimerObjectInternals, timer: JSValue, vm: *VirtualMachine) void {
|
||||
if (this.flags.kind == .setImmediate) return;
|
||||
|
||||
const idle_timeout = TimeoutObject.js.idleTimeoutGetCached(timer).?;
|
||||
const repeat = TimeoutObject.js.repeatGetCached(timer).?;
|
||||
|
||||
// https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L612
|
||||
if (!this.shouldRescheduleTimer(repeat, idle_timeout)) return;
|
||||
|
||||
const now = timespec.msFromNow(this.interval);
|
||||
const was_active = this.eventLoopTimer().state == .ACTIVE;
|
||||
if (was_active) {
|
||||
vm.timer.remove(this.eventLoopTimer());
|
||||
} else {
|
||||
this.ref();
|
||||
}
|
||||
|
||||
vm.timer.update(this.eventLoopTimer(), &now);
|
||||
this.flags.has_cleared_timer = false;
|
||||
|
||||
if (this.flags.has_js_ref) {
|
||||
this.setEnableKeepingEventLoopAlive(vm, true);
|
||||
}
|
||||
}
|
||||
|
||||
fn setEnableKeepingEventLoopAlive(this: *TimerObjectInternals, vm: *VirtualMachine, enable: bool) void {
|
||||
if (this.flags.is_keeping_event_loop_alive == enable) {
|
||||
return;
|
||||
}
|
||||
this.flags.is_keeping_event_loop_alive = enable;
|
||||
switch (this.flags.kind) {
|
||||
.setTimeout, .setInterval => vm.timer.incrementTimerRef(if (enable) 1 else -1),
|
||||
|
||||
// setImmediate has slightly different event loop logic
|
||||
.setImmediate => vm.timer.incrementImmediateRef(if (enable) 1 else -1),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hasRef(this: *TimerObjectInternals) JSValue {
|
||||
return JSValue.jsBoolean(this.flags.is_keeping_event_loop_alive);
|
||||
}
|
||||
|
||||
pub fn toPrimitive(this: *TimerObjectInternals) bun.JSError!JSValue {
|
||||
if (!this.flags.has_accessed_primitive) {
|
||||
this.flags.has_accessed_primitive = true;
|
||||
const vm = VirtualMachine.get();
|
||||
try vm.timer.maps.get(this.flags.kind).put(bun.default_allocator, this.id, this.eventLoopTimer());
|
||||
}
|
||||
return JSValue.jsNumber(this.id);
|
||||
}
|
||||
|
||||
/// This is the getter for `_destroyed` on JS Timeout and Immediate objects
|
||||
pub fn getDestroyed(this: *TimerObjectInternals) bool {
|
||||
if (this.flags.has_cleared_timer) {
|
||||
return true;
|
||||
}
|
||||
if (this.flags.in_callback) {
|
||||
return false;
|
||||
}
|
||||
return switch (this.eventLoopTimer().state) {
|
||||
.ACTIVE, .PENDING => false,
|
||||
.FIRED, .CANCELLED => true,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn finalize(this: *TimerObjectInternals) void {
|
||||
this.strong_this.deinit();
|
||||
this.deref();
|
||||
}
|
||||
|
||||
pub fn deinit(this: *TimerObjectInternals) void {
|
||||
this.strong_this.deinit();
|
||||
const vm = VirtualMachine.get();
|
||||
const kind = this.flags.kind;
|
||||
|
||||
if (this.eventLoopTimer().state == .ACTIVE) {
|
||||
vm.timer.remove(this.eventLoopTimer());
|
||||
}
|
||||
|
||||
if (this.flags.has_accessed_primitive) {
|
||||
const map = vm.timer.maps.get(kind);
|
||||
if (map.orderedRemove(this.id)) {
|
||||
// If this array gets large, let's shrink it down
|
||||
// Array keys are i32
|
||||
// Values are 1 ptr
|
||||
// Therefore, 12 bytes per entry
|
||||
// So if you created 21,000 timers and accessed them by ID, you'd be using 252KB
|
||||
const allocated_bytes = map.capacity() * @sizeOf(TimeoutMap.Data);
|
||||
const used_bytes = map.count() * @sizeOf(TimeoutMap.Data);
|
||||
if (allocated_bytes - used_bytes > 256 * 1024) {
|
||||
map.shrinkAndFree(bun.default_allocator, map.count() + 8);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.setEnableKeepingEventLoopAlive(vm, false);
|
||||
switch (kind) {
|
||||
.setImmediate => (@as(*ImmediateObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(),
|
||||
.setTimeout, .setInterval => (@as(*TimeoutObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(),
|
||||
}
|
||||
}
|
||||
};
|
||||
pub const TimeoutObject = @import("./Timer/TimeoutObject.zig");
|
||||
pub const ImmediateObject = @import("./Timer/ImmediateObject.zig");
|
||||
pub const TimerObjectInternals = @import("./Timer/TimerObjectInternals.zig");
|
||||
|
||||
pub const Kind = enum(u2) {
|
||||
setTimeout = 0,
|
||||
@@ -1276,235 +584,6 @@ pub const ID = extern struct {
|
||||
const assert = bun.assert;
|
||||
const heap = bun.io.heap;
|
||||
|
||||
pub const EventLoopTimer = struct {
|
||||
/// The absolute time to fire this timer next.
|
||||
next: timespec,
|
||||
state: State = .PENDING,
|
||||
tag: Tag,
|
||||
/// Internal heap fields.
|
||||
heap: heap.IntrusiveField(EventLoopTimer) = .{},
|
||||
|
||||
pub fn initPaused(tag: Tag) EventLoopTimer {
|
||||
return .{
|
||||
.next = .{},
|
||||
.tag = tag,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn less(_: void, a: *const EventLoopTimer, b: *const EventLoopTimer) bool {
|
||||
const sec_order = std.math.order(a.next.sec, b.next.sec);
|
||||
if (sec_order != .eq) return sec_order == .lt;
|
||||
|
||||
// collapse sub-millisecond precision for JavaScript timers
|
||||
const maybe_a_internals = a.jsTimerInternals();
|
||||
const maybe_b_internals = b.jsTimerInternals();
|
||||
var a_ns = a.next.nsec;
|
||||
var b_ns = b.next.nsec;
|
||||
if (maybe_a_internals != null) a_ns = std.time.ns_per_ms * @divTrunc(a_ns, std.time.ns_per_ms);
|
||||
if (maybe_b_internals != null) b_ns = std.time.ns_per_ms * @divTrunc(b_ns, std.time.ns_per_ms);
|
||||
|
||||
const order = std.math.order(a_ns, b_ns);
|
||||
if (order == .eq) {
|
||||
if (maybe_a_internals) |a_internals| {
|
||||
if (maybe_b_internals) |b_internals| {
|
||||
// We expect that the epoch will overflow sometimes.
|
||||
// If it does, we would ideally like timers with an epoch from before the
|
||||
// overflow to be sorted *before* timers with an epoch from after the overflow
|
||||
// (even though their epoch will be numerically *larger*).
|
||||
//
|
||||
// Wrapping subtraction gives us a distance that is consistent even if one
|
||||
// epoch has overflowed and the other hasn't. If the distance from a to b is
|
||||
// small, it's likely that b is really newer than a, so we consider a less than
|
||||
// b. If the distance from a to b is large (greater than half the u25 range),
|
||||
// it's more likely that b is older than a so the true distance is from b to a.
|
||||
return b_internals.flags.epoch -% a_internals.flags.epoch < std.math.maxInt(u25) / 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
return order == .lt;
|
||||
}
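// Editorial sketch, not part of the diff: the wrapping-subtraction tie-break
// described in the comment above can be checked in isolation. The only
// assumption is that epochs are u25 counters that are allowed to overflow.
const std = @import("std");

fn epochOlder(a_epoch: u25, b_epoch: u25) bool {
    // "a is older than b" when the wrapping distance from a to b is small.
    return b_epoch -% a_epoch < std.math.maxInt(u25) / 2;
}

test "epoch tie-break survives overflow" {
    // b was scheduled right after a, but the counter wrapped in between.
    const a: u25 = std.math.maxInt(u25);
    const b: u25 = 0;
    try std.testing.expect(epochOlder(a, b));
    try std.testing.expect(!epochOlder(b, a));
}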
|
||||
|
||||
pub const Tag = if (Environment.isWindows) enum {
|
||||
TimerCallback,
|
||||
TimeoutObject,
|
||||
ImmediateObject,
|
||||
TestRunner,
|
||||
StatWatcherScheduler,
|
||||
UpgradedDuplex,
|
||||
DNSResolver,
|
||||
WindowsNamedPipe,
|
||||
WTFTimer,
|
||||
PostgresSQLConnectionTimeout,
|
||||
PostgresSQLConnectionMaxLifetime,
|
||||
ValkeyConnectionTimeout,
|
||||
ValkeyConnectionReconnect,
|
||||
SubprocessTimeout,
|
||||
DevServerSweepSourceMaps,
|
||||
DevServerMemoryVisualizerTick,
|
||||
|
||||
pub fn Type(comptime T: Tag) type {
|
||||
return switch (T) {
|
||||
.TimerCallback => TimerCallback,
|
||||
.TimeoutObject => TimeoutObject,
|
||||
.ImmediateObject => ImmediateObject,
|
||||
.TestRunner => JSC.Jest.TestRunner,
|
||||
.StatWatcherScheduler => StatWatcherScheduler,
|
||||
.UpgradedDuplex => uws.UpgradedDuplex,
|
||||
.DNSResolver => DNSResolver,
|
||||
.WindowsNamedPipe => uws.WindowsNamedPipe,
|
||||
.WTFTimer => WTFTimer,
|
||||
.PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection,
|
||||
.PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection,
|
||||
.SubprocessTimeout => JSC.Subprocess,
|
||||
.ValkeyConnectionReconnect => JSC.API.Valkey,
|
||||
.ValkeyConnectionTimeout => JSC.API.Valkey,
|
||||
.DevServerSweepSourceMaps,
|
||||
.DevServerMemoryVisualizerTick,
|
||||
=> bun.bake.DevServer,
|
||||
};
|
||||
}
|
||||
} else enum {
|
||||
TimerCallback,
|
||||
TimeoutObject,
|
||||
ImmediateObject,
|
||||
TestRunner,
|
||||
StatWatcherScheduler,
|
||||
UpgradedDuplex,
|
||||
WTFTimer,
|
||||
DNSResolver,
|
||||
PostgresSQLConnectionTimeout,
|
||||
PostgresSQLConnectionMaxLifetime,
|
||||
ValkeyConnectionTimeout,
|
||||
ValkeyConnectionReconnect,
|
||||
SubprocessTimeout,
|
||||
DevServerSweepSourceMaps,
|
||||
DevServerMemoryVisualizerTick,
|
||||
|
||||
pub fn Type(comptime T: Tag) type {
|
||||
return switch (T) {
|
||||
.TimerCallback => TimerCallback,
|
||||
.TimeoutObject => TimeoutObject,
|
||||
.ImmediateObject => ImmediateObject,
|
||||
.TestRunner => JSC.Jest.TestRunner,
|
||||
.StatWatcherScheduler => StatWatcherScheduler,
|
||||
.UpgradedDuplex => uws.UpgradedDuplex,
|
||||
.WTFTimer => WTFTimer,
|
||||
.DNSResolver => DNSResolver,
|
||||
.PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection,
|
||||
.PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection,
|
||||
.ValkeyConnectionTimeout => JSC.API.Valkey,
|
||||
.ValkeyConnectionReconnect => JSC.API.Valkey,
|
||||
.SubprocessTimeout => JSC.Subprocess,
|
            .DevServerSweepSourceMaps,
            .DevServerMemoryVisualizerTick,
            => bun.bake.DevServer,
        };
    }
};

const TimerCallback = struct {
    callback: *const fn (*TimerCallback) Arm,
    ctx: *anyopaque,
    event_loop_timer: EventLoopTimer,
};

pub const State = enum {
    /// The timer is waiting to be enabled.
    PENDING,

    /// The timer is active and will fire at the next time.
    ACTIVE,

    /// The timer has been cancelled and will not fire.
    CANCELLED,

    /// The timer has fired and the callback has been called.
    FIRED,
};

/// If self was created by set{Immediate,Timeout,Interval}, get a pointer to the common data
/// for all those kinds of timers
fn jsTimerInternals(self: anytype) switch (@TypeOf(self)) {
    *EventLoopTimer => ?*TimerObjectInternals,
    *const EventLoopTimer => ?*const TimerObjectInternals,
    else => |T| @compileError("wrong type " ++ @typeName(T) ++ " passed to jsTimerInternals"),
} {
    switch (self.tag) {
        inline .TimeoutObject, .ImmediateObject => |tag| {
            const parent: switch (@TypeOf(self)) {
                *EventLoopTimer => *tag.Type(),
                *const EventLoopTimer => *const tag.Type(),
                else => unreachable,
            } = @fieldParentPtr("event_loop_timer", self);
            return &parent.internals;
        },
        else => return null,
    }
}

fn ns(self: *const EventLoopTimer) u64 {
    return self.next.ns();
}

pub const Arm = union(enum) {
    rearm: timespec,
    disarm,
};

pub fn fire(this: *EventLoopTimer, now: *const timespec, vm: *VirtualMachine) Arm {
    switch (this.tag) {
        .PostgresSQLConnectionTimeout => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(),
        .PostgresSQLConnectionMaxLifetime => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(),
        .ValkeyConnectionTimeout => return @as(*api.Valkey, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(),
        .ValkeyConnectionReconnect => return @as(*api.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", this))).onReconnectTimer(),
        .DevServerMemoryVisualizerTick => return bun.bake.DevServer.emitMemoryVisualizerMessageTimer(this, now),
        .DevServerSweepSourceMaps => return bun.bake.DevServer.SourceMapStore.sweepWeakRefs(this, now),
        inline else => |t| {
            if (@FieldType(t.Type(), "event_loop_timer") != EventLoopTimer) {
                @compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'");
            }
            var container: *t.Type() = @alignCast(@fieldParentPtr("event_loop_timer", this));
            if (comptime t.Type() == TimeoutObject or t.Type() == ImmediateObject) {
                return container.internals.fire(now, vm);
            }

            if (comptime t.Type() == WTFTimer) {
                return container.fire(now, vm);
            }

            if (comptime t.Type() == StatWatcherScheduler) {
                return container.timerCallback();
            }
            if (comptime t.Type() == uws.UpgradedDuplex) {
                return container.onTimeout();
            }
            if (Environment.isWindows) {
                if (comptime t.Type() == uws.WindowsNamedPipe) {
                    return container.onTimeout();
                }
            }

            if (comptime t.Type() == JSC.Jest.TestRunner) {
                container.onTestTimeout(now, vm);
                return .disarm;
            }

            if (comptime t.Type() == DNSResolver) {
                return container.checkTimeouts(now, vm);
            }

            if (comptime t.Type() == JSC.Subprocess) {
                return container.timeoutCallback();
            }

            return container.callback(container);
        },
    }
}

pub fn deinit(_: *EventLoopTimer) void {}
};

const timespec = bun.timespec;

/// A timer created by WTF code and invoked by Bun's event loop
247 src/bun.js/api/Timer/EventLoopTimer.zig Normal file
@@ -0,0 +1,247 @@
const EventLoopTimer = @This();

/// The absolute time to fire this timer next.
next: timespec,
state: State = .PENDING,
tag: Tag,
/// Internal heap fields.
heap: bun.io.heap.IntrusiveField(EventLoopTimer) = .{},

pub fn initPaused(tag: Tag) EventLoopTimer {
    return .{
        .next = .{},
        .tag = tag,
    };
}

pub fn less(_: void, a: *const EventLoopTimer, b: *const EventLoopTimer) bool {
    const sec_order = std.math.order(a.next.sec, b.next.sec);
    if (sec_order != .eq) return sec_order == .lt;

    // collapse sub-millisecond precision for JavaScript timers
    const maybe_a_internals = a.jsTimerInternals();
    const maybe_b_internals = b.jsTimerInternals();
    var a_ns = a.next.nsec;
    var b_ns = b.next.nsec;
    if (maybe_a_internals != null) a_ns = std.time.ns_per_ms * @divTrunc(a_ns, std.time.ns_per_ms);
    if (maybe_b_internals != null) b_ns = std.time.ns_per_ms * @divTrunc(b_ns, std.time.ns_per_ms);

    const order = std.math.order(a_ns, b_ns);
    if (order == .eq) {
        if (maybe_a_internals) |a_internals| {
            if (maybe_b_internals) |b_internals| {
                // We expect that the epoch will overflow sometimes.
                // If it does, we would ideally like timers with an epoch from before the
                // overflow to be sorted *before* timers with an epoch from after the overflow
                // (even though their epoch will be numerically *larger*).
                //
                // Wrapping subtraction gives us a distance that is consistent even if one
                // epoch has overflowed and the other hasn't. If the distance from a to b is
                // small, it's likely that b is really newer than a, so we consider a less than
                // b. If the distance from a to b is large (greater than half the u25 range),
                // it's more likely that b is older than a so the true distance is from b to a.
                return b_internals.flags.epoch -% a_internals.flags.epoch < std.math.maxInt(u25) / 2;
            }
        }
    }
    return order == .lt;
}

pub const Tag = if (Environment.isWindows) enum {
    TimerCallback,
    TimeoutObject,
    ImmediateObject,
    TestRunner,
    StatWatcherScheduler,
    UpgradedDuplex,
    DNSResolver,
    WindowsNamedPipe,
    WTFTimer,
    PostgresSQLConnectionTimeout,
    PostgresSQLConnectionMaxLifetime,
    ValkeyConnectionTimeout,
    ValkeyConnectionReconnect,
    SubprocessTimeout,
    DevServerSweepSourceMaps,
    DevServerMemoryVisualizerTick,

    pub fn Type(comptime T: Tag) type {
        return switch (T) {
            .TimerCallback => TimerCallback,
            .TimeoutObject => TimeoutObject,
            .ImmediateObject => ImmediateObject,
            .TestRunner => JSC.Jest.TestRunner,
            .StatWatcherScheduler => StatWatcherScheduler,
            .UpgradedDuplex => uws.UpgradedDuplex,
            .DNSResolver => DNSResolver,
            .WindowsNamedPipe => uws.WindowsNamedPipe,
            .WTFTimer => WTFTimer,
            .PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection,
            .PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection,
            .SubprocessTimeout => JSC.Subprocess,
            .ValkeyConnectionReconnect => JSC.API.Valkey,
            .ValkeyConnectionTimeout => JSC.API.Valkey,
            .DevServerSweepSourceMaps,
            .DevServerMemoryVisualizerTick,
            => bun.bake.DevServer,
        };
    }
} else enum {
    TimerCallback,
    TimeoutObject,
    ImmediateObject,
    TestRunner,
    StatWatcherScheduler,
    UpgradedDuplex,
    WTFTimer,
    DNSResolver,
    PostgresSQLConnectionTimeout,
    PostgresSQLConnectionMaxLifetime,
    ValkeyConnectionTimeout,
    ValkeyConnectionReconnect,
    SubprocessTimeout,
    DevServerSweepSourceMaps,
    DevServerMemoryVisualizerTick,

    pub fn Type(comptime T: Tag) type {
        return switch (T) {
            .TimerCallback => TimerCallback,
            .TimeoutObject => TimeoutObject,
            .ImmediateObject => ImmediateObject,
            .TestRunner => JSC.Jest.TestRunner,
            .StatWatcherScheduler => StatWatcherScheduler,
            .UpgradedDuplex => uws.UpgradedDuplex,
            .WTFTimer => WTFTimer,
            .DNSResolver => DNSResolver,
            .PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection,
            .PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection,
            .ValkeyConnectionTimeout => JSC.API.Valkey,
            .ValkeyConnectionReconnect => JSC.API.Valkey,
            .SubprocessTimeout => JSC.Subprocess,
            .DevServerSweepSourceMaps,
            .DevServerMemoryVisualizerTick,
            => bun.bake.DevServer,
        };
    }
};

const TimerCallback = struct {
    callback: *const fn (*TimerCallback) Arm,
    ctx: *anyopaque,
    event_loop_timer: EventLoopTimer,
};

pub const State = enum {
    /// The timer is waiting to be enabled.
    PENDING,

    /// The timer is active and will fire at the next time.
    ACTIVE,

    /// The timer has been cancelled and will not fire.
    CANCELLED,

    /// The timer has fired and the callback has been called.
    FIRED,
};

/// If self was created by set{Immediate,Timeout,Interval}, get a pointer to the common data
/// for all those kinds of timers
pub fn jsTimerInternals(self: anytype) switch (@TypeOf(self)) {
    *EventLoopTimer => ?*TimerObjectInternals,
    *const EventLoopTimer => ?*const TimerObjectInternals,
    else => |T| @compileError("wrong type " ++ @typeName(T) ++ " passed to jsTimerInternals"),
} {
    switch (self.tag) {
        inline .TimeoutObject, .ImmediateObject => |tag| {
            const parent: switch (@TypeOf(self)) {
                *EventLoopTimer => *tag.Type(),
                *const EventLoopTimer => *const tag.Type(),
                else => unreachable,
            } = @fieldParentPtr("event_loop_timer", self);
            return &parent.internals;
        },
        else => return null,
    }
}

fn ns(self: *const EventLoopTimer) u64 {
    return self.next.ns();
}

pub const Arm = union(enum) {
    rearm: timespec,
    disarm,
};

pub fn fire(this: *EventLoopTimer, now: *const timespec, vm: *VirtualMachine) Arm {
    switch (this.tag) {
        .PostgresSQLConnectionTimeout => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(),
        .PostgresSQLConnectionMaxLifetime => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(),
        .ValkeyConnectionTimeout => return @as(*api.Valkey, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(),
        .ValkeyConnectionReconnect => return @as(*api.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", this))).onReconnectTimer(),
        .DevServerMemoryVisualizerTick => return bun.bake.DevServer.emitMemoryVisualizerMessageTimer(this, now),
        .DevServerSweepSourceMaps => return bun.bake.DevServer.SourceMapStore.sweepWeakRefs(this, now),
        inline else => |t| {
            if (@FieldType(t.Type(), "event_loop_timer") != EventLoopTimer) {
                @compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'");
            }
            var container: *t.Type() = @alignCast(@fieldParentPtr("event_loop_timer", this));
            if (comptime t.Type() == TimeoutObject or t.Type() == ImmediateObject) {
                return container.internals.fire(now, vm);
            }

            if (comptime t.Type() == WTFTimer) {
                return container.fire(now, vm);
            }

            if (comptime t.Type() == StatWatcherScheduler) {
                return container.timerCallback();
            }
            if (comptime t.Type() == uws.UpgradedDuplex) {
                return container.onTimeout();
            }
            if (Environment.isWindows) {
                if (comptime t.Type() == uws.WindowsNamedPipe) {
                    return container.onTimeout();
                }
            }

            if (comptime t.Type() == JSC.Jest.TestRunner) {
                container.onTestTimeout(now, vm);
                return .disarm;
            }

            if (comptime t.Type() == DNSResolver) {
                return container.checkTimeouts(now, vm);
            }

            if (comptime t.Type() == JSC.Subprocess) {
                return container.timeoutCallback();
            }

            return container.callback(container);
        },
    }
}

pub fn deinit(_: *EventLoopTimer) void {}

const timespec = bun.timespec;

/// A timer created by WTF code and invoked by Bun's event loop
const WTFTimer = @import("../../WTFTimer.zig");
const VirtualMachine = JSC.VirtualMachine;
const TimerObjectInternals = @import("../Timer.zig").TimerObjectInternals;
const TimeoutObject = @import("../Timer.zig").TimeoutObject;
const ImmediateObject = @import("../Timer.zig").ImmediateObject;
const StatWatcherScheduler = @import("../../node/node_fs_stat_watcher.zig").StatWatcherScheduler;
const DNSResolver = @import("../bun/dns_resolver.zig").DNSResolver;

const bun = @import("bun");
const std = @import("std");
const Environment = bun.Environment;
const JSC = bun.JSC;

const uws = bun.uws;
const api = JSC.API;
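The wrapping-subtraction tie-break used by `less` above is easy to sanity-check in isolation. A minimal standalone sketch (the helper name and test are illustrative, not part of this diff), assuming only that the epoch is a u25 counter that is allowed to overflow:

const std = @import("std");

// Treat the u25 epoch as a wrapping counter: if the wrapped distance from
// a to b is small, b was almost certainly assigned after a, so a sorts first.
fn epochLess(a: u25, b: u25) bool {
    return b -% a < std.math.maxInt(u25) / 2;
}

test "epoch ordering survives overflow" {
    const max = std.math.maxInt(u25);
    try std.testing.expect(epochLess(3, 4)); // ordinary case
    try std.testing.expect(epochLess(max, 1)); // 1 was assigned just after the wrap
    try std.testing.expect(!epochLess(1, max));
}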
104 src/bun.js/api/Timer/ImmediateObject.zig Normal file
@@ -0,0 +1,104 @@
const ImmediateObject = @This();

const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;

pub const js = JSC.Codegen.JSImmediate;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;

ref_count: RefCount,
event_loop_timer: EventLoopTimer = .{
    .next = .{},
    .tag = .ImmediateObject,
},
internals: TimerObjectInternals,

pub fn init(
    globalThis: *JSGlobalObject,
    id: i32,
    callback: JSValue,
    arguments: JSValue,
) JSValue {
    // internals are initialized by init()
    const immediate = bun.new(ImmediateObject, .{ .ref_count = .init(), .internals = undefined });
    const js_value = immediate.toJS(globalThis);
    defer js_value.ensureStillAlive();
    immediate.internals.init(
        js_value,
        globalThis,
        id,
        .setImmediate,
        0,
        callback,
        arguments,
    );

    if (globalThis.bunVM().isInspectorEnabled()) {
        Debugger.didScheduleAsyncCall(
            globalThis,
            .DOMTimer,
            ID.asyncID(.{ .id = id, .kind = .setImmediate }),
            true,
        );
    }

    return js_value;
}

fn deinit(this: *ImmediateObject) void {
    this.internals.deinit();
    bun.destroy(this);
}

pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*ImmediateObject {
    _ = callFrame;
    return globalObject.throw("Immediate is not constructible", .{});
}

/// returns true if an exception was thrown
pub fn runImmediateTask(this: *ImmediateObject, vm: *VirtualMachine) bool {
    return this.internals.runImmediateTask(vm);
}

pub fn toPrimitive(this: *ImmediateObject, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.toPrimitive();
}

pub fn doRef(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.doRef(globalThis, callFrame.this());
}

pub fn doUnref(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.doUnref(globalThis, callFrame.this());
}

pub fn hasRef(this: *ImmediateObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.hasRef();
}

pub fn finalize(this: *ImmediateObject) void {
    this.internals.finalize();
}

pub fn getDestroyed(this: *ImmediateObject, globalThis: *JSGlobalObject) JSValue {
    _ = globalThis;
    return .jsBoolean(this.internals.getDestroyed());
}

pub fn dispose(this: *ImmediateObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
    this.internals.cancel(globalThis.bunVM());
    return .js_undefined;
}

const bun = @import("bun");
const JSC = bun.JSC;
const VirtualMachine = JSC.VirtualMachine;
const TimerObjectInternals = @import("../Timer.zig").TimerObjectInternals;
const Debugger = @import("../../Debugger.zig");
const ID = @import("../Timer.zig").ID;
const EventLoopTimer = @import("../Timer.zig").EventLoopTimer;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
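Neither ImmediateObject nor TimeoutObject stores a back-pointer from its embedded `event_loop_timer` and `internals` fields; the owner is recovered with `@fieldParentPtr` (see `EventLoopTimer.fire` above and `TimerObjectInternals.eventLoopTimer` below). A self-contained sketch of that idiom, with placeholder types rather than the real bun structs:

const std = @import("std");

const Embedded = struct {
    fired: bool = false,
};

const Container = struct {
    id: i32,
    embedded: Embedded = .{},

    // Recover the owning Container from a pointer to its embedded field.
    fn fromEmbedded(ptr: *Embedded) *Container {
        const parent: *Container = @alignCast(@fieldParentPtr("embedded", ptr));
        return parent;
    }
};

test "recover the container from its embedded field" {
    var c = Container{ .id = 42 };
    try std.testing.expect(Container.fromEmbedded(&c.embedded) == &c);
}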
134 src/bun.js/api/Timer/TimeoutObject.zig Normal file
@@ -0,0 +1,134 @@
const TimeoutObject = @This();

const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;

pub const js = JSC.Codegen.JSTimeout;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;

ref_count: RefCount,
event_loop_timer: EventLoopTimer = .{
    .next = .{},
    .tag = .TimeoutObject,
},
internals: TimerObjectInternals,

pub fn init(
    globalThis: *JSGlobalObject,
    id: i32,
    kind: Kind,
    interval: u31,
    callback: JSValue,
    arguments: JSValue,
) JSValue {
    // internals are initialized by init()
    const timeout = bun.new(TimeoutObject, .{ .ref_count = .init(), .internals = undefined });
    const js_value = timeout.toJS(globalThis);
    defer js_value.ensureStillAlive();
    timeout.internals.init(
        js_value,
        globalThis,
        id,
        kind,
        interval,
        callback,
        arguments,
    );

    if (globalThis.bunVM().isInspectorEnabled()) {
        Debugger.didScheduleAsyncCall(
            globalThis,
            .DOMTimer,
            ID.asyncID(.{ .id = id, .kind = kind.big() }),
            kind != .setInterval,
        );
    }

    return js_value;
}

fn deinit(this: *TimeoutObject) void {
    this.internals.deinit();
    bun.destroy(this);
}

pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*TimeoutObject {
    _ = callFrame;
    return globalObject.throw("Timeout is not constructible", .{});
}

pub fn toPrimitive(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.toPrimitive();
}

pub fn doRef(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.doRef(globalThis, callFrame.this());
}

pub fn doUnref(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.doUnref(globalThis, callFrame.this());
}

pub fn doRefresh(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.doRefresh(globalThis, callFrame.this());
}

pub fn hasRef(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
    return this.internals.hasRef();
}

pub fn finalize(this: *TimeoutObject) void {
    this.internals.finalize();
}

pub fn getDestroyed(this: *TimeoutObject, globalThis: *JSGlobalObject) JSValue {
    _ = globalThis;
    return .jsBoolean(this.internals.getDestroyed());
}

pub fn close(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) JSValue {
    this.internals.cancel(globalThis.bunVM());
    return callFrame.this();
}

pub fn get_onTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
    return TimeoutObject.js.callbackGetCached(thisValue).?;
}

pub fn set_onTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
    TimeoutObject.js.callbackSetCached(thisValue, globalThis, value);
}

pub fn get_idleTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
    return TimeoutObject.js.idleTimeoutGetCached(thisValue).?;
}

pub fn set_idleTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
    TimeoutObject.js.idleTimeoutSetCached(thisValue, globalThis, value);
}

pub fn get_repeat(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
    return TimeoutObject.js.repeatGetCached(thisValue).?;
}

pub fn set_repeat(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
    TimeoutObject.js.repeatSetCached(thisValue, globalThis, value);
}

pub fn dispose(this: *TimeoutObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
    this.internals.cancel(globalThis.bunVM());
    return .js_undefined;
}

const bun = @import("bun");
const JSC = bun.JSC;
const TimerObjectInternals = @import("../Timer.zig").TimerObjectInternals;
const Debugger = @import("../../Debugger.zig");
const ID = @import("../Timer.zig").ID;
const Kind = @import("../Timer.zig").Kind;
const EventLoopTimer = @import("../Timer.zig").EventLoopTimer;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
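TimeoutObject.init takes the delay as a `u31`; when a timeout is later converted into an interval (see `convertToInterval` in TimerObjectInternals.zig below), the JS `repeat` value is clamped into that range. A standalone sketch of the clamping rule, using an illustrative helper name:

const std = @import("std");

// Anything outside [1, maxInt(u31)] collapses to a 1 ms interval;
// fractional milliseconds are truncated.
fn clampInterval(num: f64) u31 {
    if (num < 1 or num > std.math.maxInt(u31)) return 1;
    return @intFromFloat(num);
}

test "repeat values are clamped into the u31 interval" {
    try std.testing.expectEqual(@as(u31, 1), clampInterval(0));
    try std.testing.expectEqual(@as(u31, 1), clampInterval(-5));
    try std.testing.expectEqual(@as(u31, 5), clampInterval(5.9));
    try std.testing.expectEqual(@as(u31, 1), clampInterval(4e12));
}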
487 src/bun.js/api/Timer/TimerObjectInternals.zig Normal file
@@ -0,0 +1,487 @@
/// Data that TimerObject and ImmediateObject have in common
const TimerObjectInternals = @This();

/// Identifier for this timer that is exposed to JavaScript (by `+timer`)
id: i32 = -1,
interval: u31 = 0,
strong_this: JSC.Strong.Optional = .empty,
flags: Flags = .{},

const Flags = packed struct(u32) {
    /// Whenever a timer is inserted into the heap (which happen on creation or refresh), the global
    /// epoch is incremented and the new epoch is set on the timer. For timers created by
    /// JavaScript, the epoch is used to break ties between timers scheduled for the same
    /// millisecond. This ensures that if you set two timers for the same amount of time, and
    /// refresh the first one, the first one will fire last. This mimics Node.js's behavior where
    /// the refreshed timer will be inserted at the end of a list, which makes it fire later.
    epoch: u25 = 0,

    kind: Kind = .setTimeout,

    // we do not allow the timer to be refreshed after we call clearInterval/clearTimeout
    has_cleared_timer: bool = false,
    is_keeping_event_loop_alive: bool = false,

    // if they never access the timer by integer, don't create a hashmap entry.
    has_accessed_primitive: bool = false,

    has_js_ref: bool = true,

    /// Set to `true` only during execution of the JavaScript function so that `_destroyed` can be
    /// false during the callback, even though the `state` will be `FIRED`.
    in_callback: bool = false,
};

fn eventLoopTimer(this: *TimerObjectInternals) *EventLoopTimer {
    switch (this.flags.kind) {
        .setImmediate => {
            const parent: *ImmediateObject = @fieldParentPtr("internals", this);
            assert(parent.event_loop_timer.tag == .ImmediateObject);
            return &parent.event_loop_timer;
        },
        .setTimeout, .setInterval => {
            const parent: *TimeoutObject = @fieldParentPtr("internals", this);
            assert(parent.event_loop_timer.tag == .TimeoutObject);
            return &parent.event_loop_timer;
        },
    }
}

fn ref(this: *TimerObjectInternals) void {
    switch (this.flags.kind) {
        .setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).ref(),
        .setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).ref(),
    }
}

fn deref(this: *TimerObjectInternals) void {
    switch (this.flags.kind) {
        .setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).deref(),
        .setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).deref(),
    }
}

extern "c" fn Bun__JSTimeout__call(globalObject: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue) bool;

/// returns true if an exception was thrown
pub fn runImmediateTask(this: *TimerObjectInternals, vm: *VirtualMachine) bool {
    if (this.flags.has_cleared_timer or
        // unref'd setImmediate callbacks should only run if there are things keeping the event
        // loop alive other than setImmediates
        (!this.flags.is_keeping_event_loop_alive and !vm.isEventLoopAliveExcludingImmediates()))
    {
        this.deref();
        return false;
    }

    const timer = this.strong_this.get() orelse {
        if (Environment.isDebug) {
            @panic("TimerObjectInternals.runImmediateTask: this_object is null");
        }
        return false;
    };
    const globalThis = vm.global;
    this.strong_this.deinit();
    this.eventLoopTimer().state = .FIRED;
    this.setEnableKeepingEventLoopAlive(vm, false);

    vm.eventLoop().enter();
    const callback = ImmediateObject.js.callbackGetCached(timer).?;
    const arguments = ImmediateObject.js.argumentsGetCached(timer).?;
    this.ref();
    const exception_thrown = this.run(globalThis, timer, callback, arguments, this.asyncID(), vm);
    this.deref();

    if (this.eventLoopTimer().state == .FIRED) {
        this.deref();
    }

    vm.eventLoop().exitMaybeDrainMicrotasks(!exception_thrown) catch return true;

    return exception_thrown;
}

pub fn asyncID(this: *const TimerObjectInternals) u64 {
    return ID.asyncID(.{ .id = this.id, .kind = this.flags.kind.big() });
}

pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *JSC.VirtualMachine) EventLoopTimer.Arm {
    const id = this.id;
    const kind = this.flags.kind.big();
    const async_id: ID = .{ .id = id, .kind = kind };
    const has_been_cleared = this.eventLoopTimer().state == .CANCELLED or this.flags.has_cleared_timer or vm.scriptExecutionStatus() != .running;

    this.eventLoopTimer().state = .FIRED;

    const globalThis = vm.global;
    const this_object = this.strong_this.get().?;

    const callback: JSValue, const arguments: JSValue, var idle_timeout: JSValue, var repeat: JSValue = switch (kind) {
        .setImmediate => .{
            ImmediateObject.js.callbackGetCached(this_object).?,
            ImmediateObject.js.argumentsGetCached(this_object).?,
            .js_undefined,
            .js_undefined,
        },
        .setTimeout, .setInterval => .{
            TimeoutObject.js.callbackGetCached(this_object).?,
            TimeoutObject.js.argumentsGetCached(this_object).?,
            TimeoutObject.js.idleTimeoutGetCached(this_object).?,
            TimeoutObject.js.repeatGetCached(this_object).?,
        },
    };

    if (has_been_cleared or !callback.toBoolean()) {
        if (vm.isInspectorEnabled()) {
            Debugger.didCancelAsyncCall(globalThis, .DOMTimer, ID.asyncID(async_id));
        }
        this.setEnableKeepingEventLoopAlive(vm, false);
        this.flags.has_cleared_timer = true;
        this.strong_this.deinit();
        this.deref();

        return .disarm;
    }

    var time_before_call: timespec = undefined;

    if (kind != .setInterval) {
        this.strong_this.clearWithoutDeallocation();
    } else {
        time_before_call = timespec.msFromNow(this.interval);
    }
    this_object.ensureStillAlive();

    vm.eventLoop().enter();
    {
        // Ensure it stays alive for this scope.
        this.ref();
        defer this.deref();

        _ = this.run(globalThis, this_object, callback, arguments, ID.asyncID(async_id), vm);

        switch (kind) {
            .setTimeout, .setInterval => {
                idle_timeout = TimeoutObject.js.idleTimeoutGetCached(this_object).?;
                repeat = TimeoutObject.js.repeatGetCached(this_object).?;
            },
            else => {},
        }

        const is_timer_done = is_timer_done: {
            // Node doesn't drain microtasks after each timer callback.
            if (kind == .setInterval) {
                if (!this.shouldRescheduleTimer(repeat, idle_timeout)) {
                    break :is_timer_done true;
                }
                switch (this.eventLoopTimer().state) {
                    .FIRED => {
                        // If we didn't clear the setInterval, reschedule it starting from
                        vm.timer.update(this.eventLoopTimer(), &time_before_call);

                        if (this.flags.has_js_ref) {
                            this.setEnableKeepingEventLoopAlive(vm, true);
                        }

                        // The ref count doesn't change. It wasn't decremented.
                    },
                    .ACTIVE => {
                        // The developer called timer.refresh() synchronously in the callback.
                        vm.timer.update(this.eventLoopTimer(), &time_before_call);

                        // Balance out the ref count.
                        // the transition from "FIRED" -> "ACTIVE" caused it to increment.
                        this.deref();
                    },
                    else => {
                        break :is_timer_done true;
                    },
                }
            } else {
                if (kind == .setTimeout and !repeat.isNull()) {
                    if (idle_timeout.getNumber()) |num| {
                        if (num != -1) {
                            this.convertToInterval(globalThis, this_object, repeat);
                            break :is_timer_done false;
                        }
                    }
                }

                if (this.eventLoopTimer().state == .FIRED) {
                    break :is_timer_done true;
                }
            }

            break :is_timer_done false;
        };

        if (is_timer_done) {
            this.setEnableKeepingEventLoopAlive(vm, false);
            // The timer will not be re-entered into the event loop at this point.
            this.deref();
        }
    }
    vm.eventLoop().exit();

    return .disarm;
}

fn convertToInterval(this: *TimerObjectInternals, global: *JSGlobalObject, timer: JSValue, repeat: JSValue) void {
    bun.debugAssert(this.flags.kind == .setTimeout);

    const vm = global.bunVM();

    const new_interval: u31 = if (repeat.getNumber()) |num| if (num < 1 or num > std.math.maxInt(u31)) 1 else @intFromFloat(num) else 1;

    // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L613
    TimeoutObject.js.idleTimeoutSetCached(timer, global, repeat);
    this.strong_this.set(global, timer);
    this.flags.kind = .setInterval;
    this.interval = new_interval;
    this.reschedule(timer, vm);
}

pub fn run(this: *TimerObjectInternals, globalThis: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue, async_id: u64, vm: *JSC.VirtualMachine) bool {
    if (vm.isInspectorEnabled()) {
        Debugger.willDispatchAsyncCall(globalThis, .DOMTimer, async_id);
    }

    defer {
        if (vm.isInspectorEnabled()) {
            Debugger.didDispatchAsyncCall(globalThis, .DOMTimer, async_id);
        }
    }

    // Bun__JSTimeout__call handles exceptions.
    this.flags.in_callback = true;
    defer this.flags.in_callback = false;
    return Bun__JSTimeout__call(globalThis, timer, callback, arguments);
}

pub fn init(
    this: *TimerObjectInternals,
    timer: JSValue,
    global: *JSGlobalObject,
    id: i32,
    kind: Kind,
    interval: u31,
    callback: JSValue,
    arguments: JSValue,
) void {
    const vm = global.bunVM();
    this.* = .{
        .id = id,
        .flags = .{ .kind = kind, .epoch = vm.timer.epoch },
        .interval = interval,
    };

    if (kind == .setImmediate) {
        ImmediateObject.js.argumentsSetCached(timer, global, arguments);
        ImmediateObject.js.callbackSetCached(timer, global, callback);
        const parent: *ImmediateObject = @fieldParentPtr("internals", this);
        vm.enqueueImmediateTask(parent);
        this.setEnableKeepingEventLoopAlive(vm, true);
        // ref'd by event loop
        parent.ref();
    } else {
        TimeoutObject.js.argumentsSetCached(timer, global, arguments);
        TimeoutObject.js.callbackSetCached(timer, global, callback);
        TimeoutObject.js.idleTimeoutSetCached(timer, global, JSC.jsNumber(interval));
        TimeoutObject.js.repeatSetCached(timer, global, if (kind == .setInterval) JSC.jsNumber(interval) else .null);

        // this increments the refcount
        this.reschedule(timer, vm);
    }

    this.strong_this.set(global, timer);
}

pub fn doRef(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue {
    this_value.ensureStillAlive();

    const did_have_js_ref = this.flags.has_js_ref;
    this.flags.has_js_ref = true;

    // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L256
    // and
    // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L685-L687
    if (!did_have_js_ref and !this.flags.has_cleared_timer) {
        this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), true);
    }

    return this_value;
}

pub fn doRefresh(this: *TimerObjectInternals, globalObject: *JSC.JSGlobalObject, this_value: JSValue) JSValue {
    // Immediates do not have a refresh function, and our binding generator should not let this
    // function be reached even if you override the `this` value calling a Timeout object's
    // `refresh` method
    assert(this.flags.kind != .setImmediate);

    // setImmediate does not support refreshing and we do not support refreshing after cleanup
    if (this.id == -1 or this.flags.kind == .setImmediate or this.flags.has_cleared_timer) {
        return this_value;
    }

    this.strong_this.set(globalObject, this_value);
    this.reschedule(this_value, VirtualMachine.get());

    return this_value;
}

pub fn doUnref(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue {
    this_value.ensureStillAlive();

    const did_have_js_ref = this.flags.has_js_ref;
    this.flags.has_js_ref = false;

    if (did_have_js_ref) {
        this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), false);
    }

    return this_value;
}

pub fn cancel(this: *TimerObjectInternals, vm: *VirtualMachine) void {
    this.setEnableKeepingEventLoopAlive(vm, false);
    this.flags.has_cleared_timer = true;

    if (this.flags.kind == .setImmediate) return;

    const was_active = this.eventLoopTimer().state == .ACTIVE;

    this.eventLoopTimer().state = .CANCELLED;
    this.strong_this.deinit();

    if (was_active) {
        vm.timer.remove(this.eventLoopTimer());
        this.deref();
    }
}

fn shouldRescheduleTimer(this: *TimerObjectInternals, repeat: JSValue, idle_timeout: JSValue) bool {
    if (this.flags.kind == .setInterval and repeat.isNull()) return false;
    if (idle_timeout.getNumber()) |num| {
        if (num == -1) return false;
    }
    return true;
}

pub fn reschedule(this: *TimerObjectInternals, timer: JSValue, vm: *VirtualMachine) void {
    if (this.flags.kind == .setImmediate) return;

    const idle_timeout = TimeoutObject.js.idleTimeoutGetCached(timer).?;
    const repeat = TimeoutObject.js.repeatGetCached(timer).?;

    // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L612
    if (!this.shouldRescheduleTimer(repeat, idle_timeout)) return;

    const now = timespec.msFromNow(this.interval);
    const was_active = this.eventLoopTimer().state == .ACTIVE;
    if (was_active) {
        vm.timer.remove(this.eventLoopTimer());
    } else {
        this.ref();
    }

    vm.timer.update(this.eventLoopTimer(), &now);
    this.flags.has_cleared_timer = false;

    if (this.flags.has_js_ref) {
        this.setEnableKeepingEventLoopAlive(vm, true);
    }
}

fn setEnableKeepingEventLoopAlive(this: *TimerObjectInternals, vm: *VirtualMachine, enable: bool) void {
    if (this.flags.is_keeping_event_loop_alive == enable) {
        return;
    }
    this.flags.is_keeping_event_loop_alive = enable;
    switch (this.flags.kind) {
        .setTimeout, .setInterval => vm.timer.incrementTimerRef(if (enable) 1 else -1),

        // setImmediate has slightly different event loop logic
        .setImmediate => vm.timer.incrementImmediateRef(if (enable) 1 else -1),
    }
}

pub fn hasRef(this: *TimerObjectInternals) JSValue {
    return JSValue.jsBoolean(this.flags.is_keeping_event_loop_alive);
}

pub fn toPrimitive(this: *TimerObjectInternals) bun.JSError!JSValue {
    if (!this.flags.has_accessed_primitive) {
        this.flags.has_accessed_primitive = true;
        const vm = VirtualMachine.get();
        try vm.timer.maps.get(this.flags.kind).put(bun.default_allocator, this.id, this.eventLoopTimer());
    }
    return JSValue.jsNumber(this.id);
}

/// This is the getter for `_destroyed` on JS Timeout and Immediate objects
pub fn getDestroyed(this: *TimerObjectInternals) bool {
    if (this.flags.has_cleared_timer) {
        return true;
    }
    if (this.flags.in_callback) {
        return false;
    }
    return switch (this.eventLoopTimer().state) {
        .ACTIVE, .PENDING => false,
        .FIRED, .CANCELLED => true,
    };
}

pub fn finalize(this: *TimerObjectInternals) void {
    this.strong_this.deinit();
    this.deref();
}

pub fn deinit(this: *TimerObjectInternals) void {
    this.strong_this.deinit();
    const vm = VirtualMachine.get();
    const kind = this.flags.kind;

    if (this.eventLoopTimer().state == .ACTIVE) {
        vm.timer.remove(this.eventLoopTimer());
    }

    if (this.flags.has_accessed_primitive) {
        const map = vm.timer.maps.get(kind);
        if (map.orderedRemove(this.id)) {
            // If this array gets large, let's shrink it down
            // Array keys are i32
            // Values are 1 ptr
            // Therefore, 12 bytes per entry
            // So if you created 21,000 timers and accessed them by ID, you'd be using 252KB
            const allocated_bytes = map.capacity() * @sizeOf(TimeoutMap.Data);
            const used_bytes = map.count() * @sizeOf(TimeoutMap.Data);
            if (allocated_bytes - used_bytes > 256 * 1024) {
                map.shrinkAndFree(bun.default_allocator, map.count() + 8);
            }
        }
    }

    this.setEnableKeepingEventLoopAlive(vm, false);
    switch (kind) {
        .setImmediate => (@as(*ImmediateObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(),
        .setTimeout, .setInterval => (@as(*TimeoutObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(),
    }
}

const bun = @import("bun");
const std = @import("std");
const JSC = bun.JSC;
const VirtualMachine = JSC.VirtualMachine;
const TimeoutObject = @import("../Timer.zig").TimeoutObject;
const ImmediateObject = @import("../Timer.zig").ImmediateObject;
const Debugger = @import("../../Debugger.zig");
const timespec = bun.timespec;
const Environment = bun.Environment;
const ID = @import("../Timer.zig").ID;
const TimeoutMap = @import("../Timer.zig").TimeoutMap;
const Kind = @import("../Timer.zig").Kind;
const EventLoopTimer = @import("../Timer.zig").EventLoopTimer;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const assert = bun.assert;
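Because `Flags` above is a `packed struct(u32)`, its field widths must sum to exactly 32 bits: the u25 epoch plus five bools leaves two bits, which suggests the `Kind` tag occupies a u2 here. A standalone sketch of that layout with an illustrative stand-in enum (not the real `Kind` from Timer.zig):

const std = @import("std");

const KindBits = enum(u2) { setTimeout, setInterval, setImmediate };

const FlagsSketch = packed struct(u32) {
    epoch: u25 = 0,
    kind: KindBits = .setTimeout,
    has_cleared_timer: bool = false,
    is_keeping_event_loop_alive: bool = false,
    has_accessed_primitive: bool = false,
    has_js_ref: bool = true,
    in_callback: bool = false,
};

test "flags pack into one machine word" {
    try std.testing.expectEqual(32, @bitSizeOf(FlagsSketch));
    try std.testing.expectEqual(4, @sizeOf(FlagsSketch));
    var flags = FlagsSketch{};
    flags.epoch +%= 1; // the epoch is allowed to wrap, as `less` relies on
    try std.testing.expectEqual(@as(u25, 1), flags.epoch);
}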
Some files were not shown because too many files have changed in this diff.