Mirror of https://github.com/oven-sh/bun, synced 2026-02-08 09:58:55 +00:00

Compare commits: `codex/fix-...` → `codex/impl...` (203 commits)
| SHA1 |
|---|
| 705a4a991a |
| 03f5a385b2 |
| 3c1a1b5634 |
| c15190990c |
| 177239cff5 |
| ea7068a531 |
| 46cd5b10a1 |
| b87cf4f247 |
| d3bc5e391f |
| f9712ce309 |
| 5e0caa0aa4 |
| 4cf31f6a57 |
| 3f257a2905 |
| ba126fb330 |
| 2072fa1d59 |
| 61024b2b4a |
| 90e3d6c898 |
| e8b652a5d9 |
| 5131e66fa5 |
| c019f86f14 |
| 354391a263 |
| 17120cefdc |
| be7db0d37a |
| 299c6c9b21 |
| f1c2a611ad |
| 7d9dd67586 |
| ccb0ed13c2 |
| b58daf86da |
| 050a9cecb7 |
| 0a3ac50931 |
| fe0bb68d17 |
| bc79a48ce4 |
| 2081e5b656 |
| e30d6d21f5 |
| 81e1a9d54d |
| 1faeba01b9 |
| 19540001d1 |
| da0bc0b0d2 |
| 95e12374ed |
| 4cc61a1b8c |
| 5416155449 |
| c7b1e5c709 |
| 444b9d1883 |
| 197c7abe7d |
| 653c459660 |
| 25dbe5cf3f |
| 2cbb196f29 |
| 064d7bb56e |
| 37505ad955 |
| c40468ea39 |
| 29dd4166f2 |
| 0b5363099b |
| 282dda62c8 |
| fd91e3de0d |
| 633f4f593d |
| fd5e777639 |
| 770c1c8327 |
| 41d10ed01e |
| bb55b2596d |
| 197443b2db |
| b62f70c23a |
| d4ccba67f2 |
| 43777cffee |
| 3aedf0692c |
| 346e97dde2 |
| aa37ecb7a5 |
| 9811a2b53e |
| b9e72d0d2e |
| b7d4b14b3d |
| 59e1320fb1 |
| e402adaebf |
| 3773ceeb7e |
| 162a9b66d8 |
| 6274f10096 |
| 978540902c |
| b99a1256ff |
| 8a1d8047f1 |
| a473657adb |
| 775c3b1987 |
| 7ee98852c6 |
| f46df399eb |
| c103b57bcc |
| 3b3cde9e74 |
| 2482af60d5 |
| 2245b5efd6 |
| 155475693b |
| f5b42e1507 |
| 139f2b23a2 |
| 28006d0ad4 |
| c44515eaaf |
| e0924ef226 |
| 9499f21518 |
| 6b4662ff55 |
| a445b45e55 |
| 82b34bbbdd |
| 4d905123fa |
| c6deb4527c |
| f3bca62a77 |
| 62794850fa |
| f53aff0935 |
| 9c5797e2f5 |
| 4329a66a1d |
| 12a4b95b34 |
| cf00cb495c |
| 5763a8e533 |
| dedd433cbf |
| d6590c4bfa |
| 07d3d6c9f6 |
| 631e674842 |
| 3d19c1156c |
| 7a069d7214 |
| 6ebad50543 |
| 8750f0b884 |
| c38bace86c |
| 9fd18361f2 |
| de6739c401 |
| 2801cb1f4a |
| b642e36da2 |
| df3337936c |
| ea05de59b3 |
| 601b8e3aaa |
| a11d9e2cd4 |
| df84f665a5 |
| 498186764a |
| 02a7d71b70 |
| c9761d4aa6 |
| c863e7582f |
| d4710c6e86 |
| e1f3796677 |
| ec07ef83a2 |
| eddee1b8cb |
| fa1d37b4e3 |
| 5b0523a32a |
| 5039310199 |
| 61e03a2758 |
| 27abb51561 |
| 09d0846d1b |
| 8e7cdb8493 |
| 538caa4d5e |
| 24bc236eb7 |
| f59050fc23 |
| 1b092f156b |
| 6a79b9ef87 |
| f62940bbda |
| c82345c0a0 |
| 817d0464f6 |
| a5bb525614 |
| 4cb7910e32 |
| d7970946eb |
| 014fb6be8f |
| 5c7991b707 |
| da5fc817d1 |
| 407c4e800a |
| 11070b8e16 |
| adfdaab4fd |
| bfd7fc06c7 |
| bd3abc5a2a |
| 193193024f |
| 6edc3a9900 |
| 1bd44e9ce7 |
| c7327d62c2 |
| 90dda8219f |
| 885979644d |
| 13c5b0d9cb |
| d6e45afef9 |
| 300aedd9cc |
| d9cf836b67 |
| 293215778f |
| 95346bd919 |
| ceaaed4848 |
| abaa69183b |
| 3e1075410b |
| 7a88bb0e1c |
| 7a790581e0 |
| d5cc530024 |
| d7548325b1 |
| d11fd94cdb |
| 4cbd040485 |
| 773484a628 |
| 71c14fac7b |
| b2a728e45d |
| 390798c172 |
| 284de53f26 |
| 5a025abddf |
| 4ab4b1b131 |
| 13ea970852 |
| ba78d5b2c3 |
| ce8767cdc8 |
| 082a9cb59c |
| 3c37f25b65 |
| a079743a02 |
| e0852fd651 |
| 6bbd1e0685 |
| 4534f6e635 |
| c62a7a77a3 |
| ecf5ea389f |
| 010ef4d119 |
| 4d77cd53f1 |
| 3cf353b755 |
| fd894f5a65 |
| a9969b7db2 |
| 27a08fca84 |
| a398bd62a3 |
```diff
@@ -450,7 +450,7 @@ function getBuildCppStep(platform, options) {
       BUN_CPP_ONLY: "ON",
       ...getBuildEnv(platform, options),
     },
-    // We used to build the C++ dependencies and bun in seperate steps.
+    // We used to build the C++ dependencies and bun in separate steps.
     // However, as long as the zig build takes longer than both sequentially,
     // it's cheaper to run them in the same step. Can be revisited in the future.
     command: [`${command} --target bun`, `${command} --target dependencies`],
@@ -922,7 +922,7 @@ function getOptionsStep() {
     {
       key: "unified-builds",
       select: "Do you want to build each platform in a single step?",
-      hint: "If true, builds will not be split into seperate steps (this will likely slow down the build)",
+      hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
       required: false,
       default: "false",
       options: booleanOptions,
@@ -930,7 +930,7 @@ function getOptionsStep() {
     {
       key: "unified-tests",
       select: "Do you want to run tests in a single step?",
-      hint: "If true, tests will not be split into seperate steps (this will be very slow)",
+      hint: "If true, tests will not be split into separate steps (this will be very slow)",
       required: false,
       default: "false",
       options: booleanOptions,
```
`.claude/commands/upgrade-webkit.md` (new file, 23 lines)

```md
Upgrade Bun's Webkit fork to the latest upstream version of Webkit.

To do that:

- cd vendor/WebKit
- git fetch upstream
- git merge upstream main
- Fix the merge conflicts
- cd ../../ (back to bun)
- make jsc-build (this will take about 7 minutes)
- While it compiles, in another task review the JSC commits between the last version of Webkit and the new version. Write up a summary of the webkit changes in a file called "webkit-changes.md"
- bun run build:local (build a build of Bun with the new Webkit, make sure it compiles)
- After making sure it compiles, run some code to make sure things work. something like ./build/debug-local/bun-debug --print '42' should be all you need
- cd vendor/WebKit
- git commit -am "Upgrade Webkit to the latest version"
- git push
- get the commit SHA in the vendor/WebKit directory of your new commit
- cd ../../ (back to bun)
- Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to the commit SHA of your new commit
- git checkout -b bun/webkit-upgrade-<commit-sha>
- commit + push (without adding the webkit-changes.md file)
- create PR titled "Upgrade Webkit to the <commit-sha>", paste your webkit-changes.md into the PR description
- delete the webkit-changes.md file
```
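As a rough illustration of the version-bump step in that checklist, the `WEBKIT_VERSION` edit could be scripted like this. This is a sketch using Bun's shell and file APIs, not part of the checklist itself; the regex assumes the `set(WEBKIT_VERSION <sha>)` form that `cmake/tools/SetupWebKit.cmake` uses (visible in the SetupWebKit diff further down this page):

```ts
import { $ } from "bun";

// Grab the new commit SHA from the vendored WebKit checkout...
const sha = (await $`git -C vendor/WebKit rev-parse HEAD`.text()).trim();

// ...and splice it into SetupWebKit.cmake (illustrative helper, not a repo script).
const cmakePath = "cmake/tools/SetupWebKit.cmake";
const cmake = await Bun.file(cmakePath).text();
await Bun.write(
  cmakePath,
  cmake.replace(/set\(WEBKIT_VERSION [0-9a-f]+\)/, `set(WEBKIT_VERSION ${sha})`),
);

console.log(`WEBKIT_VERSION -> ${sha}`);
```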
```diff
@@ -91,7 +91,7 @@ devTest("html file is watched", {

 `files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.

-Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to recieve changes.
+Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes.

 When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; All other hard reloads automatically fail the test.
```
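For orientation, a hypothetical sketch of the shape these tests take, pieced together from the calls mentioned above (`devTest`, `files`, `dev.write`, `dev.fetch`); the real signatures live in the dev-server test harness and may differ:

```ts
// Hypothetical example based on the harness description above; `devTest`
// and the `dev` object come from the dev-server test harness, not bun:test.
devTest("css file is hot-reloaded", {
  files: {
    "index.html": `<link rel="stylesheet" href="style.css" />`,
    "style.css": "h1 { color: red; }",
  },
  async test(dev) {
    // dev.write waits for the hot-reload round-trip, unlike node:fs APIs.
    await dev.write("style.css", "h1 { color: blue; }");
    const res = await dev.fetch("/style.css");
    console.log(await res.text()); // expect the updated stylesheet
  },
});
```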
```diff
@@ -11,10 +11,10 @@ You'll find all of Bun's tests in the `test/` directory.
 * `test/`
   * `cli/` - CLI command tests, like `bun install` or `bun init`
   * `js/` - JavaScript & TypeScript tests
-    * `bun/` - `Bun` APIs tests, seperated by category, for example: `glob/` for `Bun.Glob` tests
-    * `node/` - Node.js module tests, seperated by module, for example: `assert/` for `node:assert` tests
+    * `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
+    * `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
     * `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
-    * `web/` - Web API tests, seperated by category, for example: `fetch/` for `Request` and `Response` tests
+    * `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
   * `third_party/` - npm package tests, to validate that basic usage works in Bun
   * `napi/` - N-API tests
   * `v8/` - V8 C++ API tests
```
```diff
@@ -5,4 +5,6 @@
 #
 # git config blame.ignoreRevsFile .git-blame-ignore-revs
 #
 4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
+dedd433cbf2e2fe38e51bc166e08fbcc601ad42b # JSValue.undefined -> .jsUndefined()
+6b4662ff55f58247cc2fd22e85b4f9805b0950a5 # JSValue.jsUndefined() -> .js_undefined
```
`.github/CODEOWNERS` (vendored, 13 lines changed)

```diff
@@ -1,18 +1,5 @@
 # Project
 /.github/CODEOWNERS @Jarred-Sumner

-# Build system
-/CMakeLists.txt @Electroid
-/cmake/*.cmake @Electroid
-/scripts/ @Electroid
-
-# CI
-/.buildkite/ @Electroid
-/.github/workflows/ @Electroid
-
-# Debugger protocol
-/packages/bun-inspector-protocol/ @Electroid
-/packages/bun-debug-adapter-protocol/ @Electroid
-
 # Tests
 /test/expectations.txt @Jarred-Sumner
```
`.github/ISSUE_TEMPLATE/6-crash-report.yml` (vendored, 1 line changed)

```diff
@@ -2,6 +2,7 @@ name: Prefilled crash report
 description: Report a crash in Bun
 labels:
   - crash
+  - needs triage
 body:
   - type: markdown
     attributes:
```
`.github/workflows/codex-test-sync.yml` (vendored, 33 lines changed)

```diff
@@ -5,14 +5,15 @@ on:
   types: [labeled, opened]

 env:
-  BUN_VERSION: "canary"
+  BUN_VERSION: "1.2.15"

 jobs:
   sync-node-tests:
     runs-on: ubuntu-latest
     if: |
       (github.event.action == 'labeled' && github.event.label.name == 'codex') ||
-      (github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex'))
+      (github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
+      contains(github.head_ref, 'codex')
     permissions:
       contents: write
       pull-requests: write
@@ -28,15 +29,27 @@ jobs:
         with:
           bun-version: ${{ env.BUN_VERSION }}

-      - name: Get changed files and sync tests
+      - name: Get changed files
+        id: changed-files
+        uses: tj-actions/changed-files@v44
+        with:
+          files: |
+            test/js/node/test/parallel/**/*.{js,mjs,ts}
+            test/js/node/test/sequential/**/*.{js,mjs,ts}
+
+      - name: Sync tests
+        if: steps.changed-files.outputs.any_changed == 'true'
         shell: bash
         run: |
-          # Get the list of changed files from the PR
-          git diff --name-only origin/main...HEAD | while read -r file; do
-            if [[ "$file" =~ ^test/js/node/test/(parallel|sequential)/(.+)\.js$ ]]; then
-              test_name="${BASH_REMATCH[2]}"
-              echo "Syncing test: $test_name"
-              bun node:test:cp "$test_name"
-            fi
-          done
+          echo "Changed test files:"
+          echo "${{ steps.changed-files.outputs.all_changed_files }}"
+
+          # Process each changed test file
+          for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
+            # Extract test name from file path
+            test_name=$(basename "$file" | sed 's/\.[^.]*$//')
+            echo "Syncing test: $test_name"
+            bun node:test:cp "$test_name"
+          done

       - name: Commit changes
```
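The new loop's `basename "$file" | sed 's/\.[^.]*$//'` pipeline just strips the directory and the last extension. Expressed with Node path APIs for clarity (a standalone illustration, not repo code):

```ts
import { basename, extname } from "node:path";

// Equivalent of `basename "$file" | sed 's/\.[^.]*$//'`:
// drop the directory, then drop the final extension.
function testNameFromPath(file: string): string {
  return basename(file, extname(file));
}

console.log(testNameFromPath("test/js/node/test/parallel/test-fs-read.mjs"));
// => "test-fs-read"
```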
`.github/workflows/format.yml` (vendored, 4 lines changed)

```diff
@@ -45,7 +45,9 @@ jobs:
       - name: Zig Format
         run: |
           bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
-          zig fmt src/**.zig
+          zig fmt src
+          bun scripts/sortImports src
+          zig fmt src
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
```
`.github/workflows/update-zstd.yml` (vendored, 10 lines changed)

```diff
@@ -21,16 +21,16 @@ jobs:
           set -euo pipefail

           # Extract the commit hash from the line after COMMIT
-          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildZstd.cmake)
+          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/CloneZstd.cmake)

           if [ -z "$CURRENT_VERSION" ]; then
-            echo "Error: Could not find COMMIT line in BuildZstd.cmake"
+            echo "Error: Could not find COMMIT line in CloneZstd.cmake"
             exit 1
           fi

           # Validate that it looks like a git hash
           if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
-            echo "Error: Invalid git hash format in BuildZstd.cmake"
+            echo "Error: Invalid git hash format in CloneZstd.cmake"
             echo "Found: $CURRENT_VERSION"
             echo "Expected: 40 character hexadecimal string"
             exit 1
@@ -76,7 +76,7 @@ jobs:
         run: |
           set -euo pipefail
           # Handle multi-line format where COMMIT and its value are on separate lines
-          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/    ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildZstd.cmake
+          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/    ${{ steps.check-version.outputs.latest }}/}' cmake/targets/CloneZstd.cmake

       - name: Create Pull Request
         if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
@@ -84,7 +84,7 @@ jobs:
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           add-paths: |
-            cmake/targets/BuildZstd.cmake
+            cmake/targets/CloneZstd.cmake
           commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
           title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
           delete-branch: true
```
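The awk one-liner above is dense; here is a TypeScript equivalent of what it does (find the line after `COMMIT` in `CloneZstd.cmake`, trim it, and validate it as a 40-character git hash). A sketch for illustration only, run with the repo root as the working directory:

```ts
// Mirror of the workflow's awk + validation steps, using Bun's file API.
const text = await Bun.file("cmake/targets/CloneZstd.cmake").text();
const lines = text.split("\n");
const idx = lines.findIndex((l) => /^\s*COMMIT\s*$/.test(l));
const commit = idx >= 0 ? (lines[idx + 1] ?? "").trim() : "";

if (!/^[0-9a-f]{40}$/.test(commit)) {
  throw new Error(`Invalid git hash format in CloneZstd.cmake: "${commit}"`);
}
console.log(commit); // e.g. f8745da6ff1ad1e7bab384bd1f9d742439278e99
```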
`.lldbinit` (17 lines changed)

```diff
@@ -1,16 +1 @@
-# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
-# (-p), but do not stop the process (-s) or notify the user (-n).
-#
-# JSC's garbage collector sends this signal (as configured by Bun WebKit in
-# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
-# it. So stopping the process would just create noise when debugging any long-running script.
-process handle -p true -s false -n false SIGPWR
-
-command script import misctools/lldb/lldb_pretty_printers.py
-type category enable zig.lang
-type category enable zig.std
-
-command script import misctools/lldb/lldb_webkit.py
-
-command script delete btjs
-command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
+command source -C -s true -e true misctools/lldb/init.lldb
```
`CLAUDE.md` (new file, 237 lines)

This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed for speed, with a bundler, test runner, and Node.js-compatible package manager. It's written primarily in Zig with C++ for JavaScriptCore integration, powered by WebKit's JavaScriptCore engine.

## Building and Running Bun

### Build Commands

- **Build debug version**: `bun bd` or `bun run build:debug`
  - Creates a debug build at `./build/debug/bun-debug`
  - Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
  - **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`

### Other Build Variants

- `bun run build:release` - Release build

Address sanitizer is enabled by default in debug builds of Bun.

## Testing

### Running Tests

- **Single test file**: `bun bd test test/js/bun/http/serve.test.ts`
- **Fuzzy match test file**: `bun bd test http/serve.test.ts`
- **With filter**: `bun bd test test/js/bun/http/serve.test.ts -t "should handle"`

### Test Organization

- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
- `test/js/node/` - Node.js compatibility tests
- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
- `test/cli/` - CLI command tests (install, run, test, etc.)
- `test/regression/issue/` - Regression tests (create one per bug fix)
- `test/bundler/` - Bundler and transpiler tests
- `test/integration/` - End-to-end integration tests
- `test/napi/` - N-API tests
- `test/v8/` - V8 C++ API compatibility tests

### Writing Tests

Tests use Bun's Jest-compatible test runner with proper test fixtures:

```typescript
import { test, expect } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

test("my feature", async () => {
  // Create temp directory with test files
  const dir = tempDirWithFiles("test-prefix", {
    "index.js": `console.log("hello");`,
  });

  // Spawn Bun process
  await using proc = Bun.spawn({
    cmd: [bunExe(), "index.js"],
    env: bunEnv,
    cwd: dir,
  });

  const [stdout, stderr, exitCode] = await Promise.all([
    new Response(proc.stdout).text(),
    new Response(proc.stderr).text(),
    proc.exited,
  ]);

  expect(exitCode).toBe(0);
  expect(stdout).toBe("hello\n");
});
```

## Code Architecture

### Language Structure

- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources

### Core Source Organization

#### Runtime Core (`src/`)

- `bun.zig` - Main entry point
- `cli.zig` - CLI command orchestration
- `js_parser.zig`, `js_lexer.zig`, `js_printer.zig` - JavaScript parsing/printing
- `transpiler.zig` - Wrapper around js_parser with sourcemap support
- `resolver/` - Module resolution system
- `allocators/` - Custom memory allocators for performance

#### JavaScript Runtime (`src/bun.js/`)

- `bindings/` - C++ JavaScriptCore bindings
  - Generated classes from `.classes.ts` files
  - Manual bindings for complex APIs
- `api/` - Bun-specific APIs
  - `server.zig` - HTTP server implementation
  - `FFI.zig` - Foreign Function Interface
  - `crypto.zig` - Cryptographic operations
  - `glob.zig` - File pattern matching
- `node/` - Node.js compatibility layer
  - Module implementations (fs, path, crypto, etc.)
  - Process and Buffer APIs
- `webcore/` - Web API implementations
  - `fetch.zig` - Fetch API
  - `streams.zig` - Web Streams
  - `Blob.zig`, `Response.zig`, `Request.zig`
- `event_loop/` - Event loop and task management

#### Build Tools & Package Manager

- `src/bundler/` - JavaScript bundler
  - Advanced tree-shaking
  - CSS processing
  - HTML handling
- `src/install/` - Package manager
  - `lockfile/` - Lockfile handling
  - `npm.zig` - npm registry client
  - `lifecycle_script_runner.zig` - Package scripts

#### Other Key Components

- `src/shell/` - Cross-platform shell implementation
- `src/css/` - CSS parser and processor
- `src/http/` - HTTP client implementation
  - `websocket_client/` - WebSocket client (including deflate support)
- `src/sql/` - SQL database integrations
- `src/bake/` - Server-side rendering framework

### JavaScript Class Implementation (C++)

When implementing JavaScript classes in C++:

1. Create three classes if there's a public constructor:

   - `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
   - `class FooPrototype : public JSC::JSNonFinalObject`
   - `class FooConstructor : public JSC::InternalFunction`

2. Define properties using HashTableValue arrays
3. Add iso subspaces for classes with C++ fields
4. Cache structures in ZigGlobalObject

## Development Workflow

### Code Formatting

- `bun run prettier` - Format JS/TS files
- `bun run zig-format` - Format Zig files
- `bun run clang-format` - Format C++ files

### Watching for Changes

- `bun run watch` - Incremental Zig compilation with error checking
- `bun run watch-windows` - Windows-specific watch mode

### Code Generation

Code generation happens automatically as part of the build process. The main scripts are:

- `src/codegen/generate-classes.ts` - Generates Zig & C++ bindings from `*.classes.ts` files
- `src/codegen/generate-jssink.ts` - Generates stream-related classes
- `src/codegen/bundle-modules.ts` - Bundles built-in modules like `node:fs`
- `src/codegen/bundle-functions.ts` - Bundles global functions like `ReadableStream`

In development, bundled modules can be reloaded without rebuilding Zig by running `bun run build`.

## JavaScript Modules (`src/js/`)

Built-in JavaScript modules use special syntax and are organized as:

- `node/` - Node.js compatibility modules (`node:fs`, `node:path`, etc.)
- `bun/` - Bun-specific modules (`bun:ffi`, `bun:sqlite`, etc.)
- `thirdparty/` - NPM modules we replace (like `ws`)
- `internal/` - Internal modules not exposed to users
- `builtins/` - Core JavaScript builtins (streams, console, etc.)

### Special Syntax in Built-in Modules

1. **`$` prefix** - Access to private properties and JSC intrinsics:

   ```js
   const arr = $Array.from(...); // Private global
   map.$set(...); // Private method
   const arr2 = $newArrayWithSize(5); // JSC intrinsic
   ```

2. **`require()`** - Must use string literals, resolved at compile time:

   ```js
   const fs = require("fs"); // Directly loads by numeric ID
   ```

3. **Debug helpers**:

   - `$debug()` - Like console.log but stripped in release builds
   - `$assert()` - Assertions stripped in release builds
   - `if($debug) {}` - Check if debug env var is set

4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated

5. **Export syntax**: Use `export default` which gets converted to a return statement:

   ```js
   export default {
     readFile,
     writeFile,
   };
   ```

Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.

## Important Development Notes

1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging

## Key APIs and Features

### Bun-Specific APIs

- **Bun.serve()** - High-performance HTTP server
- **Bun.spawn()** - Process spawning with better performance than Node.js
- **Bun.file()** - Fast file I/O operations
- **Bun.write()** - Unified API for writing to files, stdout, etc.
- **Bun.$ (Shell)** - Cross-platform shell scripting
- **Bun.SQLite** - Native SQLite integration
- **Bun.FFI** - Call native libraries from JavaScript
- **Bun.Glob** - Fast file pattern matching
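For a quick taste of a few of the APIs in that list, here is a small self-contained sketch (the file name is illustrative):

```ts
// Serve a file and fetch it back, combining Bun.write, Bun.serve, and Bun.file.
await Bun.write("hello.txt", "hello from bun\n");

const server = Bun.serve({
  port: 0, // pick any free port
  fetch: () => new Response(Bun.file("hello.txt")),
});

const res = await fetch(`http://localhost:${server.port}/`);
console.log(await res.text()); // "hello from bun\n"
server.stop();
```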
````diff
@@ -144,6 +144,14 @@ $ bun bd test foo.test.ts
 $ bun bd ./foo.ts
 ```

+Bun generally takes about 2.5 minutes to compile a debug build when there are Zig changes. If your development workflow is "change one line, save, rebuild", you will spend too much time waiting for the build to finish. Instead:
+
+- Batch up your changes
+- Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work)
+- Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
+- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
+- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).

 ## Code generation scripts

 Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.
@@ -179,6 +187,7 @@ To run a release build from a pull request, you can use the `bun-pr` npm package
 bunx bun-pr <pr-number>
 bunx bun-pr <branch-name>
 bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
+bunx bun-pr --asan <pr-number> # Linux x64 only
 ```

 This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`.
@@ -189,24 +198,18 @@ bun-1234566 --version

 This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub.

-## Valgrind
+## AddressSanitizer

-On Linux, valgrind can help find memory issues.
+[AddressSanitizer](https://en.wikipedia.org/wiki/AddressSanitizer) helps find memory issues, and is enabled by default in debug builds of Bun on Linux and macOS. This includes the Zig code and all dependencies. It makes the Zig code take about 2x longer to build; if that's stopping you from being productive, you can change `-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>` to `-Denable_asan=false` in the `cmake/targets/BuildBun.cmake` file to disable it, but generally we recommend batching your changes up between builds.

-Keep in mind:
-
-- JavaScriptCore doesn't support valgrind. It will report spurious errors.
-- Valgrind is slow
-- Mimalloc will sometimes cause spurious errors when debug build is enabled
-
-You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You may need to manually compile Valgrind instead of using it from your Linux package manager.
-
-`--fair-sched=try` is necessary if running multithreaded code in Bun (such as the bundler). Otherwise it will hang.
+To build a release build with Address Sanitizer, run:

 ```bash
-$ valgrind --fair-sched=try --track-origins=yes bun-debug <args>
+$ bun run build:release:asan
 ```

+In CI, we run our test suite with at least one target that is built with Address Sanitizer.
+
 ## Building WebKit locally + Debug mode of JSC

 WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:
````
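The debug-log environment variables added above compose naturally with `Bun.spawn`. A sketch (the `fetch` scope name is hypothetical; use any scope that appears as `Output.scoped(.<scope>, ...)` in the Zig code):

```ts
// Run the debug build with one debug-log scope enabled and everything
// else silenced, per the bullets above.
await using proc = Bun.spawn({
  cmd: ["./build/debug/bun-debug", "--print", "42"],
  env: {
    ...process.env,
    BUN_DEBUG_QUIET_LOGS: "1", // silence all scopes...
    BUN_DEBUG_fetch: "1",      // ...except this (hypothetical) one
  },
});
await proc.exited;
```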
````diff
@@ -47,6 +47,8 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).

 > **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.

+> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.sh/docs/installation#cpu-requirements-and-baseline-builds)
+
 ```sh
 # with install script (recommended)
 curl -fsSL https://bun.sh/install | bash
````
```diff
@@ -50,6 +50,10 @@ bench("murmur64v2 (short)", () => {
   Bun.hash.murmur64v2(shortStr);
 });

+bench("rapidhash (short)", () => {
+  Bun.hash.rapidhash(shortStr);
+});
+
 bench("wyhash (128 KB)", () => {
   Bun.hash.wyhash(longStr);
 });
@@ -94,4 +98,8 @@ bench("murmur64v2 (128 KB)", () => {
   Bun.hash.murmur64v2(longStr);
 });

+bench("rapidhash (128 KB)", () => {
+  Bun.hash.rapidhash(longStr);
+});
+
 run();
```
`build.zig` (27 lines changed)

```diff
@@ -63,6 +63,7 @@ const BunBuildOptions = struct {
     /// `./build/codegen` or equivalent
     codegen_path: []const u8,
     no_llvm: bool,
+    override_no_export_cpp_apis: bool,

     cached_options_module: ?*Module = null,
     windows_shim: ?WindowsShim = null,
@@ -95,6 +96,7 @@ const BunBuildOptions = struct {
     opts.addOption(bool, "enable_asan", this.enable_asan);
     opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
     opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
+    opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);

     const mod = opts.createModule();
     this.cached_options_module = mod;
@@ -206,6 +208,7 @@ pub fn build(b: *Build) !void {
     const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;

     const no_llvm = b.option(bool, "no_llvm", "Experiment with Zig self hosted backends. No stability guaranteed") orelse false;
+    const override_no_export_cpp_apis = b.option(bool, "override-no-export-cpp-apis", "Override the default export_cpp_apis logic to disable exports") orelse false;

     var build_options = BunBuildOptions{
         .target = target,
@@ -217,6 +220,7 @@ pub fn build(b: *Build) !void {
         .codegen_path = codegen_path,
         .codegen_embed = codegen_embed,
         .no_llvm = no_llvm,
+        .override_no_export_cpp_apis = override_no_export_cpp_apis,

         .version = try Version.parse(bun_version),
         .canary_revision = canary: {
@@ -386,6 +390,12 @@ pub fn build(b: *Build) !void {
             .{ .os = .windows, .arch = .x86_64 },
         }, &.{ .Debug, .ReleaseFast });
     }
+    {
+        const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows");
+        addMultiCheck(b, step, build_options, &.{
+            .{ .os = .windows, .arch = .x86_64 },
+        }, &.{.Debug});
+    }
     {
         const step = b.step("check-macos", "Check for semantic analysis errors on Windows");
         addMultiCheck(b, step, build_options, &.{
@@ -393,6 +403,13 @@ pub fn build(b: *Build) !void {
             .{ .os = .mac, .arch = .aarch64 },
         }, &.{ .Debug, .ReleaseFast });
     }
+    {
+        const step = b.step("check-macos-debug", "Check for semantic analysis errors on Windows");
+        addMultiCheck(b, step, build_options, &.{
+            .{ .os = .mac, .arch = .x86_64 },
+            .{ .os = .mac, .arch = .aarch64 },
+        }, &.{.Debug});
+    }
     {
         const step = b.step("check-linux", "Check for semantic analysis errors on Windows");
         addMultiCheck(b, step, build_options, &.{
@@ -400,6 +417,13 @@ pub fn build(b: *Build) !void {
             .{ .os = .linux, .arch = .aarch64 },
         }, &.{ .Debug, .ReleaseFast });
     }
+    {
+        const step = b.step("check-linux-debug", "Check for semantic analysis errors on Windows");
+        addMultiCheck(b, step, build_options, &.{
+            .{ .os = .linux, .arch = .x86_64 },
+            .{ .os = .linux, .arch = .aarch64 },
+        }, &.{.Debug});
+    }

     // zig build translate-c-headers
     {
@@ -476,6 +500,7 @@ fn addMultiCheck(
         .codegen_path = root_build_options.codegen_path,
         .no_llvm = root_build_options.no_llvm,
         .enable_asan = root_build_options.enable_asan,
+        .override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
     };

     var obj = addBunObject(b, &options);
@@ -508,6 +533,8 @@ fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize:
         translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
     }

+    translate_c.addIncludePath(b.path("vendor/zstd/lib"));
+
     if (target.result.os.tag == .windows) {
         // translate-c is unable to translate the unsuffixed windows functions
         // like `SetCurrentDirectory` since they are defined with an odd macro
```
```diff
@@ -44,6 +44,7 @@
     "src/bun.js/bindings/webcrypto/*/*.cpp",
     "src/bun.js/bindings/node/*.cpp",
     "src/bun.js/bindings/node/crypto/*.cpp",
+    "src/bun.js/bindings/node/http/*.cpp",
     "src/bun.js/bindings/v8/*.cpp",
     "src/bun.js/bindings/v8/shim/*.cpp",
     "src/bake/*.cpp",
@@ -60,7 +61,9 @@
     "packages/bun-usockets/src/internal/*.c",
     "packages/bun-usockets/src/crypto/*.c",
     "src/bun.js/bindings/uv-posix-polyfills.c",
-    "src/bun.js/bindings/uv-posix-stubs.c"
+    "src/bun.js/bindings/uv-posix-stubs.c",
+    "src/*.c",
+    "src/bun.js/bindings/node/http/llhttp/*.c"
   ]
 }
```
```diff
@@ -7,5 +7,9 @@ packages/bun-usockets/src/loop.c
 packages/bun-usockets/src/quic.c
 packages/bun-usockets/src/socket.c
 packages/bun-usockets/src/udp.c
+src/asan-config.c
+src/bun.js/bindings/node/http/llhttp/api.c
+src/bun.js/bindings/node/http/llhttp/http.c
+src/bun.js/bindings/node/http/llhttp/llhttp.c
 src/bun.js/bindings/uv-posix-polyfills.c
 src/bun.js/bindings/uv-posix-stubs.c
```
```diff
@@ -28,6 +28,7 @@ src/bun.js/bindings/BunWorkerGlobalScope.cpp
 src/bun.js/bindings/c-bindings.cpp
 src/bun.js/bindings/CallSite.cpp
 src/bun.js/bindings/CallSitePrototype.cpp
+src/bun.js/bindings/CatchScopeBinding.cpp
 src/bun.js/bindings/CodeCoverage.cpp
 src/bun.js/bindings/ConsoleObject.cpp
 src/bun.js/bindings/Cookie.cpp
@@ -99,6 +100,9 @@ src/bun.js/bindings/napi_finalizer.cpp
 src/bun.js/bindings/napi_handle_scope.cpp
 src/bun.js/bindings/napi_type_tag.cpp
 src/bun.js/bindings/napi.cpp
+src/bun.js/bindings/NapiClass.cpp
+src/bun.js/bindings/NapiRef.cpp
+src/bun.js/bindings/NapiWeakValue.cpp
 src/bun.js/bindings/ncrpyto_engine.cpp
 src/bun.js/bindings/ncrypto.cpp
 src/bun.js/bindings/node/crypto/CryptoDhJob.cpp
@@ -144,6 +148,13 @@ src/bun.js/bindings/node/crypto/JSSign.cpp
 src/bun.js/bindings/node/crypto/JSVerify.cpp
 src/bun.js/bindings/node/crypto/KeyObject.cpp
 src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
+src/bun.js/bindings/node/http/JSConnectionsList.cpp
+src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
+src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
+src/bun.js/bindings/node/http/JSHTTPParser.cpp
+src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
+src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
+src/bun.js/bindings/node/http/NodeHTTPParser.cpp
 src/bun.js/bindings/node/NodeTimers.cpp
 src/bun.js/bindings/NodeAsyncHooks.cpp
 src/bun.js/bindings/NodeDirent.cpp
@@ -159,6 +170,7 @@ src/bun.js/bindings/NodeVM.cpp
 src/bun.js/bindings/NodeVMModule.cpp
 src/bun.js/bindings/NodeVMScript.cpp
 src/bun.js/bindings/NodeVMSourceTextModule.cpp
+src/bun.js/bindings/NodeVMSyntheticModule.cpp
 src/bun.js/bindings/NoOpForTesting.cpp
 src/bun.js/bindings/ObjectBindings.cpp
 src/bun.js/bindings/objects.cpp
@@ -167,6 +179,7 @@ src/bun.js/bindings/Path.cpp
 src/bun.js/bindings/ProcessBindingBuffer.cpp
 src/bun.js/bindings/ProcessBindingConstants.cpp
 src/bun.js/bindings/ProcessBindingFs.cpp
+src/bun.js/bindings/ProcessBindingHTTPParser.cpp
 src/bun.js/bindings/ProcessBindingNatives.cpp
 src/bun.js/bindings/ProcessBindingTTYWrap.cpp
 src/bun.js/bindings/ProcessBindingUV.cpp
```
```diff
@@ -52,7 +52,7 @@ src/js/internal/debugger.ts
 src/js/internal/errors.ts
 src/js/internal/fifo.ts
 src/js/internal/fixed_queue.ts
 src/js/internal/freelist.ts
 src/js/internal/fs/cp-sync.ts
 src/js/internal/fs/cp.ts
 src/js/internal/fs/glob.ts
```
```diff
@@ -10,10 +10,26 @@ src/allocators/NullableAllocator.zig
 src/analytics/analytics_schema.zig
 src/analytics/analytics_thread.zig
 src/api/schema.zig
 src/ast/Ast.zig
 src/ast/ASTMemoryAllocator.zig
 src/ast/B.zig
 src/ast/base.zig
 src/ast/Binding.zig
 src/ast/BundledAst.zig
 src/ast/CharFreq.zig
 src/ast/E.zig
 src/ast/Expr.zig
 src/ast/G.zig
 src/ast/Macro.zig
 src/ast/NewStore.zig
 src/ast/Op.zig
 src/ast/S.zig
 src/ast/Scope.zig
 src/ast/ServerComponentBoundary.zig
 src/ast/Stmt.zig
 src/ast/Symbol.zig
 src/ast/TS.zig
 src/ast/UseDirective.zig
 src/async/posix_event_loop.zig
 src/async/stub_event_loop.zig
 src/async/windows_event_loop.zig
@@ -32,7 +51,11 @@ src/bun.js/api/bun/h2_frame_parser.zig
 src/bun.js/api/bun/lshpack.zig
 src/bun.js/api/bun/process.zig
 src/bun.js/api/bun/socket.zig
 src/bun.js/api/bun/socket/Handlers.zig
 src/bun.js/api/bun/socket/Listener.zig
 src/bun.js/api/bun/socket/SocketAddress.zig
 src/bun.js/api/bun/socket/tls_socket_functions.zig
 src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig
 src/bun.js/api/bun/spawn.zig
 src/bun.js/api/bun/spawn/stdio.zig
 src/bun.js/api/bun/ssl_wrapper.zig
@@ -55,13 +78,24 @@ src/bun.js/api/html_rewriter.zig
 src/bun.js/api/JSBundler.zig
 src/bun.js/api/JSTranspiler.zig
 src/bun.js/api/server.zig
 src/bun.js/api/server/AnyRequestContext.zig
 src/bun.js/api/server/FileRoute.zig
 src/bun.js/api/server/HTMLBundle.zig
 src/bun.js/api/server/HTTPStatusText.zig
 src/bun.js/api/server/InspectorBunFrontendDevServerAgent.zig
 src/bun.js/api/server/NodeHTTPResponse.zig
 src/bun.js/api/server/RequestContext.zig
 src/bun.js/api/server/ServerConfig.zig
 src/bun.js/api/server/ServerWebSocket.zig
 src/bun.js/api/server/SSLConfig.zig
 src/bun.js/api/server/StaticRoute.zig
 src/bun.js/api/server/WebSocketServerContext.zig
 src/bun.js/api/streams.classes.zig
 src/bun.js/api/Timer.zig
 src/bun.js/api/Timer/EventLoopTimer.zig
 src/bun.js/api/Timer/ImmediateObject.zig
 src/bun.js/api/Timer/TimeoutObject.zig
 src/bun.js/api/Timer/TimerObjectInternals.zig
 src/bun.js/api/TOMLObject.zig
 src/bun.js/api/UnsafeObject.zig
 src/bun.js/bindgen_test.zig
@@ -70,6 +104,7 @@ src/bun.js/bindings/AnyPromise.zig
 src/bun.js/bindings/bun-simdutf.zig
 src/bun.js/bindings/CachedBytecode.zig
 src/bun.js/bindings/CallFrame.zig
 src/bun.js/bindings/CatchScope.zig
 src/bun.js/bindings/codegen.zig
 src/bun.js/bindings/CommonAbortReason.zig
 src/bun.js/bindings/CommonStrings.zig
@@ -105,6 +140,7 @@ src/bun.js/bindings/JSPropertyIterator.zig
 src/bun.js/bindings/JSRef.zig
 src/bun.js/bindings/JSRuntimeType.zig
 src/bun.js/bindings/JSString.zig
 src/bun.js/bindings/JSType.zig
 src/bun.js/bindings/JSUint8Array.zig
 src/bun.js/bindings/JSValue.zig
 src/bun.js/bindings/NodeModuleModule.zig
@@ -133,6 +169,21 @@ src/bun.js/ConsoleObject.zig
 src/bun.js/Counters.zig
 src/bun.js/Debugger.zig
 src/bun.js/event_loop.zig
 src/bun.js/event_loop/AnyEventLoop.zig
 src/bun.js/event_loop/AnyTask.zig
 src/bun.js/event_loop/AnyTaskWithExtraContext.zig
 src/bun.js/event_loop/ConcurrentPromiseTask.zig
 src/bun.js/event_loop/ConcurrentTask.zig
 src/bun.js/event_loop/CppTask.zig
 src/bun.js/event_loop/DeferredTaskQueue.zig
 src/bun.js/event_loop/EventLoopHandle.zig
 src/bun.js/event_loop/GarbageCollectionController.zig
 src/bun.js/event_loop/JSCScheduler.zig
 src/bun.js/event_loop/ManagedTask.zig
 src/bun.js/event_loop/MiniEventLoop.zig
 src/bun.js/event_loop/PosixSignalHandle.zig
 src/bun.js/event_loop/Task.zig
 src/bun.js/event_loop/WorkTask.zig
 src/bun.js/hot_reloader.zig
 src/bun.js/ipc.zig
 src/bun.js/javascript_core_c_api.zig
@@ -176,6 +227,9 @@ src/bun.js/node/util/parse_args_utils.zig
 src/bun.js/node/util/parse_args.zig
 src/bun.js/node/util/validators.zig
 src/bun.js/node/win_watcher.zig
 src/bun.js/node/zlib/NativeBrotli.zig
 src/bun.js/node/zlib/NativeZlib.zig
 src/bun.js/node/zlib/NativeZstd.zig
 src/bun.js/ProcessAutoKiller.zig
 src/bun.js/rare_data.zig
 src/bun.js/ResolveMessage.zig
@@ -228,14 +282,47 @@ src/bun.js/webcore/TextEncoder.zig
 src/bun.js/webcore/TextEncoderStreamEncoder.zig
 src/bun.js/WTFTimer.zig
 src/bun.zig
 src/bundler/AstBuilder.zig
 src/bundler/bundle_v2.zig
 src/bundler/BundleThread.zig
 src/bundler/Chunk.zig
 src/bundler/DeferredBatchTask.zig
 src/bundler/entry_points.zig
 src/bundler/Graph.zig
 src/bundler/HTMLImportManifest.zig
 src/bundler/linker_context/computeChunks.zig
 src/bundler/linker_context/computeCrossChunkDependencies.zig
 src/bundler/linker_context/convertStmtsForChunk.zig
 src/bundler/linker_context/convertStmtsForChunkForDevServer.zig
 src/bundler/linker_context/doStep5.zig
 src/bundler/linker_context/findAllImportedPartsInJSOrder.zig
 src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig
 src/bundler/linker_context/findImportedFilesInCSSOrder.zig
 src/bundler/linker_context/generateChunksInParallel.zig
 src/bundler/linker_context/generateCodeForFileInChunkJS.zig
 src/bundler/linker_context/generateCodeForLazyExport.zig
 src/bundler/linker_context/generateCompileResultForCssChunk.zig
 src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
 src/bundler/linker_context/generateCompileResultForJSChunk.zig
 src/bundler/linker_context/postProcessCSSChunk.zig
 src/bundler/linker_context/postProcessHTMLChunk.zig
 src/bundler/linker_context/postProcessJSChunk.zig
 src/bundler/linker_context/prepareCssAstsForChunk.zig
 src/bundler/linker_context/renameSymbolsInChunk.zig
 src/bundler/linker_context/scanImportsAndExports.zig
 src/bundler/linker_context/writeOutputFilesToDisk.zig
 src/bundler/LinkerContext.zig
 src/bundler/LinkerGraph.zig
 src/bundler/ParseTask.zig
 src/bundler/ServerComponentParseTask.zig
 src/bundler/ThreadPool.zig
 src/bunfig.zig
 src/cache.zig
 src/ci_info.zig
 src/cli.zig
 src/cli/add_command.zig
 src/cli/add_completions.zig
 src/cli/Arguments.zig
 src/cli/audit_command.zig
 src/cli/build_command.zig
 src/cli/bunx_command.zig
@@ -384,7 +471,22 @@ src/deps/picohttp.zig
 src/deps/picohttpparser.zig
 src/deps/tcc.zig
 src/deps/uws.zig
 src/deps/uws/App.zig
 src/deps/uws/BodyReaderMixin.zig
 src/deps/uws/ConnectingSocket.zig
 src/deps/uws/InternalLoopData.zig
 src/deps/uws/ListenSocket.zig
 src/deps/uws/Loop.zig
 src/deps/uws/Request.zig
 src/deps/uws/Response.zig
 src/deps/uws/socket.zig
 src/deps/uws/SocketContext.zig
 src/deps/uws/Timer.zig
 src/deps/uws/udp.zig
 src/deps/uws/UpgradedDuplex.zig
 src/deps/uws/us_socket_t.zig
 src/deps/uws/WebSocket.zig
 src/deps/uws/WindowsNamedPipe.zig
 src/deps/zig-clap/clap.zig
 src/deps/zig-clap/clap/args.zig
 src/deps/zig-clap/clap/comptime.zig
@@ -405,6 +507,7 @@ src/fd.zig
 src/feature_flags.zig
 src/fmt.zig
 src/fs.zig
 src/fs/stat_hash.zig
 src/futex.zig
 src/generated_perf_trace_events.zig
 src/generated_versions_list.zig
@@ -425,6 +528,7 @@ src/http/mime_type.zig
 src/http/url_path.zig
 src/http/websocket_client.zig
 src/http/websocket_client/CppWebSocket.zig
 src/http/websocket_client/WebSocketDeflate.zig
 src/http/websocket_client/WebSocketUpgradeClient.zig
 src/http/websocket_http_client.zig
 src/http/websocket.zig
@@ -434,7 +538,10 @@ src/import_record.zig
 src/ini.zig
 src/install/bin.zig
 src/install/dependency.zig
 src/install/ExternalSlice.zig
 src/install/extract_tarball.zig
 src/install/hoisted_install.zig
 src/install/install_binding.zig
 src/install/install.zig
 src/install/integrity.zig
 src/install/lifecycle_script_runner.zig
@@ -453,10 +560,28 @@ src/install/lockfile/printer/tree_printer.zig
 src/install/lockfile/printer/Yarn.zig
 src/install/lockfile/Tree.zig
 src/install/migration.zig
 src/install/NetworkTask.zig
 src/install/npm.zig
 src/install/PackageInstall.zig
 src/install/PackageInstaller.zig
 src/install/PackageManager.zig
 src/install/PackageManager/CommandLineArguments.zig
 src/install/PackageManager/install_with_manager.zig
 src/install/PackageManager/PackageJSONEditor.zig
 src/install/PackageManager/PackageManagerDirectories.zig
 src/install/PackageManager/PackageManagerEnqueue.zig
 src/install/PackageManager/PackageManagerLifecycle.zig
 src/install/PackageManager/PackageManagerOptions.zig
 src/install/PackageManager/PackageManagerResolution.zig
 src/install/PackageManager/patchPackage.zig
 src/install/PackageManager/processDependencyList.zig
 src/install/PackageManager/ProgressStrings.zig
 src/install/PackageManager/runTasks.zig
 src/install/PackageManager/updatePackageJSONAndInstall.zig
 src/install/PackageManager/UpdateRequest.zig
 src/install/PackageManager/WorkspacePackageJSONCache.zig
 src/install/PackageManagerTask.zig
 src/install/PackageManifestMap.zig
 src/install/padding_checker.zig
 src/install/patch_install.zig
 src/install/repository.zig
@@ -468,11 +593,11 @@ src/install/windows-shim/bun_shim_impl.zig
 src/io/heap.zig
 src/io/io.zig
 src/io/MaxBuf.zig
 src/io/openForWriting.zig
 src/io/PipeReader.zig
 src/io/pipes.zig
 src/io/PipeWriter.zig
 src/io/source.zig
 src/io/time.zig
 src/js_ast.zig
 src/js_lexer_tables.zig
 src/js_lexer.zig
@@ -539,6 +664,7 @@ src/semver/SemverString.zig
 src/semver/SlicedString.zig
 src/semver/Version.zig
 src/sha.zig
 src/shell/AllocScope.zig
 src/shell/braces.zig
 src/shell/Builtin.zig
 src/shell/builtin/basename.zig
@@ -563,12 +689,27 @@ src/shell/builtin/yes.zig
 src/shell/EnvMap.zig
 src/shell/EnvStr.zig
 src/shell/interpreter.zig
 src/shell/IO.zig
 src/shell/IOReader.zig
 src/shell/IOWriter.zig
 src/shell/ParsedShellScript.zig
 src/shell/RefCountedStr.zig
 src/shell/shell.zig
 src/shell/states/Assigns.zig
 src/shell/states/Async.zig
 src/shell/states/Base.zig
 src/shell/states/Binary.zig
 src/shell/states/Cmd.zig
 src/shell/states/CondExpr.zig
 src/shell/states/Expansion.zig
 src/shell/states/If.zig
 src/shell/states/Pipeline.zig
 src/shell/states/Script.zig
 src/shell/states/Stmt.zig
 src/shell/states/Subshell.zig
 src/shell/subproc.zig
 src/shell/util.zig
 src/shell/Yield.zig
 src/sourcemap/CodeCoverage.zig
 src/sourcemap/LineOffsetTable.zig
 src/sourcemap/sourcemap.zig
@@ -582,12 +723,16 @@ src/StaticHashMap.zig
 src/string_immutable.zig
 src/string_types.zig
 src/string.zig
 src/string/escapeHTML.zig
 src/string/HashedString.zig
 src/string/MutableString.zig
 src/string/paths.zig
 src/string/PathString.zig
 src/string/SmolStr.zig
 src/string/StringBuilder.zig
 src/string/StringJoiner.zig
 src/string/unicode.zig
 src/string/visible.zig
 src/string/WTFStringImpl.zig
 src/sync.zig
 src/sys_uv.zig
```
```diff
@@ -42,6 +42,29 @@ else()
   set(CONFIGURE_DEPENDS "")
 endif()

+# --- Dependencies ---
+
+set(BUN_DEPENDENCIES
+  BoringSSL
+  Brotli
+  Cares
+  Highway
+  LibDeflate
+  LolHtml
+  Lshpack
+  Mimalloc
+  TinyCC
+  Zlib
+  LibArchive # must be loaded after zlib
+  HdrHistogram # must be loaded after zlib
+  Zstd
+)
+
+include(CloneZstd)
+# foreach(dependency ${BUN_DEPENDENCIES})
+#   include(Clone${dependency})
+# endforeach()
+
 # --- Codegen ---

 set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error)
@@ -408,6 +431,7 @@ set(BUN_OBJECT_LUT_SOURCES
   ${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
   ${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
   ${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
+  ${CWD}/src/bun.js/bindings/ProcessBindingHTTPParser.cpp
   ${CWD}/src/bun.js/modules/NodeModuleModule.cpp
   ${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
 )
@@ -421,6 +445,7 @@ set(BUN_OBJECT_LUT_OUTPUTS
   ${CODEGEN_PATH}/ProcessBindingConstants.lut.h
   ${CODEGEN_PATH}/ProcessBindingFs.lut.h
   ${CODEGEN_PATH}/ProcessBindingNatives.lut.h
+  ${CODEGEN_PATH}/ProcessBindingHTTPParser.lut.h
   ${CODEGEN_PATH}/NodeModuleModule.lut.h
   ${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
 )
@@ -580,6 +605,7 @@ register_command(
     ${BUN_ZIG_OUTPUT}
   TARGETS
     clone-zig
+    clone-zstd
   SOURCES
     ${BUN_ZIG_SOURCES}
     ${BUN_ZIG_GENERATED_SOURCES}
@@ -647,20 +673,14 @@ if(WIN32)
   else()
     set(Bun_VERSION_WITH_TAG ${VERSION})
   endif()
-  set(BUN_ICO_PATH ${CWD}/src/bun.ico)
+  configure_file(${CWD}/src/bun.ico ${CODEGEN_PATH}/bun.ico COPYONLY)
+  set(BUN_ICO_PATH ${CODEGEN_PATH}/bun.ico)
   configure_file(
     ${CWD}/src/windows-app-info.rc
     ${CODEGEN_PATH}/windows-app-info.rc
     @ONLY
   )
-  add_custom_command(
-    OUTPUT ${CODEGEN_PATH}/windows-app-info.res
-    COMMAND rc.exe /fo ${CODEGEN_PATH}/windows-app-info.res ${CODEGEN_PATH}/windows-app-info.rc
-    DEPENDS ${CODEGEN_PATH}/windows-app-info.rc ${CODEGEN_PATH}/bun.ico
-    COMMENT "Adding Windows resource file ${CODEGEN_PATH}/windows-app-info.res with ico in ${CODEGEN_PATH}/bun.ico"
-  )
-  set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.res)
+  set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
 endif()

 # --- Executable ---
@@ -732,6 +752,7 @@ target_include_directories(${bun} PRIVATE
   ${CWD}/src/bun.js/bindings/webcore
   ${CWD}/src/bun.js/bindings/webcrypto
   ${CWD}/src/bun.js/bindings/node/crypto
+  ${CWD}/src/bun.js/bindings/node/http
   ${CWD}/src/bun.js/bindings/sqlite
   ${CWD}/src/bun.js/bindings/v8
   ${CWD}/src/bun.js/modules
@@ -890,6 +911,9 @@ if(NOT WIN32)
 else()
   target_compile_options(${bun} PUBLIC
     -Wno-nullability-completeness
+    -Wno-inconsistent-dllimport
+    -Wno-incompatible-pointer-types
+    -Wno-deprecated-declarations
   )
 endif()
@@ -1015,6 +1039,7 @@ if(WIN32)
     target_link_libraries(${bun} PRIVATE
       ${WEBKIT_LIB_PATH}/WTF.lib
       ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
+      ${WEBKIT_LIB_PATH}/bmalloc.lib
       ${WEBKIT_LIB_PATH}/sicudtd.lib
      ${WEBKIT_LIB_PATH}/sicuind.lib
      ${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1023,6 +1048,7 @@
     target_link_libraries(${bun} PRIVATE
       ${WEBKIT_LIB_PATH}/WTF.lib
       ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
+      ${WEBKIT_LIB_PATH}/bmalloc.lib
       ${WEBKIT_LIB_PATH}/sicudt.lib
       ${WEBKIT_LIB_PATH}/sicuin.lib
       ${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -1046,22 +1072,6 @@ endif()

 # --- Dependencies ---

-set(BUN_DEPENDENCIES
-  BoringSSL
-  Brotli
-  Cares
-  Highway
-  LibDeflate
-  LolHtml
-  Lshpack
-  Mimalloc
-  TinyCC
-  Zlib
-  LibArchive # must be loaded after zlib
-  HdrHistogram # must be loaded after zlib
-  Zstd
-)
-
 if(WIN32)
   list(APPEND BUN_DEPENDENCIES Libuv)
 endif()
```
```diff
@@ -1,12 +1,3 @@
-register_repository(
-  NAME
-    zstd
-  REPOSITORY
-    facebook/zstd
-  COMMIT
-    f8745da6ff1ad1e7bab384bd1f9d742439278e99
-)
-
 register_cmake_command(
   TARGET
     zstd
@@ -23,4 +14,6 @@ register_cmake_command(
   LIBRARIES
     zstd_static WIN32
     zstd UNIX
+  INCLUDES
+    lib
 )
```
`cmake/targets/CloneZstd.cmake` (new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+register_repository(
+  NAME
+    zstd
+  REPOSITORY
+    facebook/zstd
+  COMMIT
+    f8745da6ff1ad1e7bab384bd1f9d742439278e99
+)
```
```diff
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
 option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

 if(NOT WEBKIT_VERSION)
-  set(WEBKIT_VERSION b98e20b11e6ab044f73218bdd05ab064587b9ead)
+  set(WEBKIT_VERSION f98cecf7d6d1528fcf0da3dc6a23ce95650d2e0c)
 endif()

 string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
```
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5")
|
||||
set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
@@ -55,13 +55,13 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF
|
||||
optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to the local zig cache directory" DEFAULT ${CACHE_PATH}/zig/local)
optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global)

if(CI AND CMAKE_HOST_APPLE)
if(CI)
  set(ZIG_COMPILER_SAFE_DEFAULT ON)
else()
  set(ZIG_COMPILER_SAFE_DEFAULT OFF)
endif()

optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler. Only availble on macos aarch64." DEFAULT ${ZIG_COMPILER_SAFE_DEFAULT})
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler." DEFAULT ${ZIG_COMPILER_SAFE_DEFAULT})

setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR})
setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR})
@@ -260,7 +260,6 @@ _bun_pm_completion() {
'hash\:"generate & print the hash of the current lockfile" '
'hash-string\:"print the string used to hash the lockfile" '
'hash-print\:"print the hash stored in the current lockfile" '
'audit\:"run a security audit of dependencies in Bun'\''s lockfile"'
'cache\:"print the path to the cache folder" '
)

@@ -540,6 +539,7 @@ _bun_update_completion() {
'--save[Save to package.json]' \
'--dry-run[Don'"'"'t install anything]' \
'--frozen-lockfile[Disallow changes to lockfile]' \
'--latest[Updates dependencies to latest version, regardless of compatibility]' \
'-f[Always request the latest versions from the registry & reinstall all dependencies]' \
'--force[Always request the latest versions from the registry & reinstall all dependencies]' \
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
@@ -573,7 +573,7 @@ _bun_outdated_completion() {
'--no-progress[Disable the progress bar]' \
'--help[Print this help menu]' &&
ret=0

case $state in
config)
_bun_list_bunfig_toml
@@ -175,6 +175,7 @@ Bun.hash.xxHash3("data", 1234);
Bun.hash.murmur32v3("data", 1234);
Bun.hash.murmur32v2("data", 1234);
Bun.hash.murmur64v2("data", 1234);
Bun.hash.rapidhash("data", 1234);
```

## `Bun.CryptoHasher`
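A minimal sketch of the incremental hashing API:

```ts
const hasher = new Bun.CryptoHasher("sha256");
hasher.update("hello ");
hasher.update("world");
console.log(hasher.digest("hex")); // SHA-256 of "hello world", hex-encoded
```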
@@ -326,7 +326,11 @@ Bun.serve({

### HTML imports

To add a client-side single-page app, you can use an HTML import:
Bun supports importing HTML files directly into your server code, enabling full-stack applications with both server-side and client-side code. HTML imports work in two modes:

**Development (`bun --hot`):** Assets are bundled on-demand at runtime, enabling hot module replacement (HMR) for a fast, iterative development experience. When you change your frontend code, the browser automatically updates without a full page reload.

**Production (`bun build`):** When building with `bun build --target=bun`, the `import index from "./index.html"` statement resolves to a pre-built manifest object containing all bundled client assets. `Bun.serve` consumes this manifest to serve optimized assets with zero runtime bundling overhead. This is ideal for deploying to production.

```ts
import myReactSinglePageApp from "./index.html";
@@ -338,9 +342,9 @@ Bun.serve({
});
```

HTML imports don't just serve HTML. It's a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler and CSS parser.
HTML imports don't just serve HTML — it's a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler and CSS parser. You can use this to build full-featured frontends with React, TypeScript, Tailwind CSS, and more.

You can use this to build a full-featured frontend with React, TypeScript, Tailwind CSS, and more. Check out [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack) to learn more.
For a complete guide on building full-stack applications with HTML imports, including detailed examples and best practices, see [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack).

### Practical example: REST API
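A sketch of what such a REST API can look like (route paths and payloads are illustrative):

```ts
import { serve } from "bun";

serve({
  routes: {
    "/api/users": {
      GET: () => Response.json([{ id: 1, name: "Ada" }]),
      POST: async req => {
        const body = await req.json();
        return Response.json({ id: 2, ...body }, { status: 201 });
      },
    },
  },
});
```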
@@ -582,11 +582,11 @@ Compresses a `Uint8Array` using zlib's DEFLATE algorithm.
const buf = Buffer.from("hello".repeat(100));
const compressed = Bun.deflateSync(buf);

buf; // => Uint8Array(25)
compressed; // => Uint8Array(10)
buf; // => Buffer(500)
compressed; // => Uint8Array(12)
```

The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bungzipsync).
The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bun-gzipsync).
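For example, a zlib compression level can be passed (a sketch; `level` is one of the shared zlib options):

```ts
const data = Buffer.from("hello".repeat(100));
// Trade compression speed for output size with a higher level.
const smaller = Bun.deflateSync(data, { level: 9 });
const restored = Bun.inflateSync(smaller); // round-trips back to the original bytes
```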
## `Bun.inflateSync()`

@@ -126,6 +126,81 @@ The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that erro

The `--bytecode` argument enables bytecode compilation. Every time you run JavaScript code in Bun, JavaScriptCore (the engine) will compile your source code into bytecode. We can move this parsing work from runtime to bundle time, saving you startup time.
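The same option is exposed by the `Bun.build()` JavaScript API; a minimal sketch (paths are illustrative, and the `format: "cjs"` requirement is an assumption based on current behavior):

```ts
// Sketch: move JSC parsing work from startup to bundle time.
await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./dist",
  target: "bun",
  format: "cjs", // bytecode currently requires CommonJS output (assumption)
  bytecode: true,
});
```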
## Full-stack executables

{% note %}

New in Bun v1.2.17

{% /note %}

Bun's `--compile` flag can create standalone executables that contain both server and client code, making it ideal for full-stack applications. When you import an HTML file in your server code, Bun automatically bundles all frontend assets (JavaScript, CSS, etc.) and embeds them into the executable: as soon as Bun sees the HTML import on the server, it kicks off a frontend build process to bundle the JavaScript, CSS, and other assets.

{% codetabs %}

```ts#server.ts
import { serve } from "bun";
import index from "./index.html";

const server = serve({
  routes: {
    "/": index,
    "/api/hello": { GET: () => Response.json({ message: "Hello from API" }) },
  },
});

console.log(`Server running at http://localhost:${server.port}`);
```

```html#index.html
<!DOCTYPE html>
<html>
  <head>
    <title>My App</title>
    <link rel="stylesheet" href="./styles.css">
  </head>
  <body>
    <h1>Hello World</h1>
    <script src="./app.js"></script>
  </body>
</html>
```

```js#app.js
console.log("Hello from the client!");
```

```css#styles.css
body {
  background-color: #f0f0f0;
}
```

{% /codetabs %}

To build this into a single executable:

```sh
bun build --compile ./server.ts --outfile myapp
```

This creates a self-contained binary that includes:

- Your server code
- The Bun runtime
- All frontend assets (HTML, CSS, JavaScript)
- Any npm packages used by your server

The result is a single file that can be deployed anywhere without needing Node.js, Bun, or any dependencies installed. Just run:

```sh
./myapp
```

Bun automatically handles serving the frontend assets with proper MIME types and cache headers. The HTML import is replaced with a manifest object that `Bun.serve` uses to efficiently serve pre-bundled assets.

For more details on building full-stack applications with Bun, see the [full-stack guide](/docs/bundler/fullstack).

## Worker

To use workers in a standalone executable, add the worker's entrypoint to the CLI arguments:
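A sketch of what that looks like (file names are hypothetical): build with `bun build --compile ./index.ts ./my-worker.ts`, then reference the worker by path so the embedded module resolves:

```ts
// Sketch: the second CLI entrypoint is embedded, so this resolves inside the executable.
const worker = new Worker(new URL("./my-worker.ts", import.meta.url).href);
worker.postMessage("ping");
worker.onmessage = event => console.log("worker replied:", event.data);
```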
@@ -174,7 +249,7 @@ $ ./hello

Standalone executables support embedding files.

To embed files into an executable with `bun build --compile`, import the file in your code
To embed files into an executable with `bun build --compile`, import the file in your code.

```ts
// this becomes an internal file path
@@ -353,5 +428,4 @@ Currently, the `--compile` flag can only accept a single entrypoint at a time an
- `--splitting`
- `--public-path`
- `--target=node` or `--target=browser`
- `--format` - always outputs a binary executable. Internally, it's almost esm.
- `--no-bundle` - we always bundle everything into the executable.
@@ -1,5 +1,3 @@
Using `Bun.serve()`'s `routes` option, you can run your frontend and backend in the same app with no extra steps.

To get started, import HTML files and pass them to the `routes` option in `Bun.serve()`.

```ts
@@ -234,7 +232,92 @@ When `console: true` is set, Bun will stream console logs from the browser to th

#### Production mode

When serving your app in production, set `development: false` in `Bun.serve()`.
Hot reloading and `development: true` help you iterate quickly, but in production, your server should be as fast as possible and have as few external dependencies as possible.

##### Ahead of time bundling (recommended)

As of Bun v1.2.17, you can use `Bun.build` or `bun build` to bundle your full-stack application ahead of time.

```sh
$ bun build --target=bun --production --outdir=dist ./src/index.ts
```

When Bun's bundler sees an HTML import from server-side code, it will bundle the referenced JavaScript/TypeScript/TSX/JSX and CSS files into a manifest object that `Bun.serve()` can use to serve the assets.

```ts
import { serve } from "bun";
import index from "./index.html";

serve({
  routes: { "/": index },
});
```

{% details summary="Internally, the `index` variable is a manifest object that looks something like this" %}

```json
{
  "index": "./index.html",
  "files": [
    {
      "input": "index.html",
      "path": "./index-f2me3qnf.js",
      "loader": "js",
      "isEntry": true,
      "headers": {
        "etag": "eet6gn75",
        "content-type": "text/javascript;charset=utf-8"
      }
    },
    {
      "input": "index.html",
      "path": "./index.html",
      "loader": "html",
      "isEntry": true,
      "headers": {
        "etag": "r9njjakd",
        "content-type": "text/html;charset=utf-8"
      }
    },
    {
      "input": "index.html",
      "path": "./index-gysa5fmk.css",
      "loader": "css",
      "isEntry": true,
      "headers": {
        "etag": "50zb7x61",
        "content-type": "text/css;charset=utf-8"
      }
    },
    {
      "input": "logo.svg",
      "path": "./logo-kygw735p.svg",
      "loader": "file",
      "isEntry": false,
      "headers": {
        "etag": "kygw735p",
        "content-type": "application/octet-stream"
      }
    },
    {
      "input": "react.svg",
      "path": "./react-ck11dneg.svg",
      "loader": "file",
      "isEntry": false,
      "headers": {
        "etag": "ck11dneg",
        "content-type": "application/octet-stream"
      }
    }
  ]
}
```

{% /details %}

##### Runtime bundling

When adding a build step is too complicated, you can set `development: false` in `Bun.serve()`.

- Enables in-memory caching of bundled assets. Bun will bundle assets lazily on the first request to an `.html` file, and cache the result in memory until the server restarts.
- Enables `Cache-Control` and `ETag` headers (see the sketch below)

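A sketch of this runtime-bundling setup:

```ts
import { serve } from "bun";
import index from "./index.html";

// Sketch: with development disabled, assets are bundled lazily on first request
// and cached in memory, with Cache-Control and ETag headers enabled.
serve({
  routes: { "/": index },
  development: false,
});
```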
@@ -298,7 +381,6 @@ Note: this is currently in `bunfig.toml` to make it possible to know statically
Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<link>` tags in HTML files, uses them as entrypoints for [Bun's bundler](/docs/bundler), generates an optimized bundle for the JavaScript/TypeScript/TSX/JSX and CSS files, and serves the result.

1. **`<script>` processing**

   - Transpiles TypeScript, JSX, and TSX in `<script>` tags
   - Bundles imported dependencies
   - Generates sourcemaps for debugging
@@ -309,7 +391,6 @@ Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<
```

2. **`<link>` processing**

   - Processes CSS imports and `<link>` tags
   - Concatenates CSS files
   - Rewrites `url` and asset paths to include content-addressable hashes in URLs
@@ -319,18 +400,15 @@ Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<
```

3. **`<img>` & asset processing**

   - Links to assets are rewritten to include content-addressable hashes in URLs
   - Small assets in CSS files are inlined into `data:` URLs, reducing the total number of HTTP requests sent over the wire

4. **Rewrite HTML**

   - Combines all `<script>` tags into a single `<script>` tag with a content-addressable hash in the URL
   - Combines all `<link>` tags into a single `<link>` tag with a content-addressable hash in the URL
   - Outputs a new HTML file

5. **Serve**

   - All the output files from the bundler are exposed as static routes, using the same mechanism internally as when you pass a `Response` object to [`static` in `Bun.serve()`](/docs/api/http#static-routes).

This works similarly to how [`Bun.build` processes HTML files](/docs/bundler/html).
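To illustrate the scanning step, here is a minimal sketch that collects `<script src>` entrypoints with `HTMLRewriter` (illustrative only; the bundler does this internally):

```ts
// Sketch: gather <script src> values from an HTML document.
const entrypoints: string[] = [];
const rewriter = new HTMLRewriter().on("script[src]", {
  element(el) {
    const src = el.getAttribute("src");
    if (src) entrypoints.push(src);
  },
});
await rewriter.transform(new Response(`<script src="./app.tsx"></script>`)).text();
console.log(entrypoints); // ["./app.tsx"]
```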
@@ -26,6 +26,7 @@ The bundler is a key piece of infrastructure in the JavaScript ecosystem. As a b
- **Reducing HTTP requests.** A single package in `node_modules` may consist of hundreds of files, and large applications may have dozens of such dependencies. Loading each of these files with a separate HTTP request becomes untenable very quickly, so bundlers are used to convert our application source code into a smaller number of self-contained "bundles" that can be loaded with a single request.
- **Code transforms.** Modern apps are commonly built with languages or tools like TypeScript, JSX, and CSS modules, all of which must be converted into plain JavaScript and CSS before they can be consumed by a browser. The bundler is the natural place to configure these transformations.
- **Framework features.** Frameworks rely on bundler plugins & code transformations to implement common patterns like file-system routing, client-server code co-location (think `getServerSideProps` or Remix loaders), and server components.
- **Full-stack Applications.** Bun's bundler can handle both server and client code in a single command, enabling optimized production builds and single-file executables. With build-time HTML imports, you can bundle your entire application — frontend assets and backend server — into a single deployable unit.

Let's jump into the bundler API.

@@ -324,7 +325,7 @@ Depending on the target, Bun will apply different module resolution rules and op

---

- `bun`
  - For generating bundles that are intended to be run by the Bun runtime. In many cases, it isn't necessary to bundle server-side code; you can directly execute the source code without modification. However, bundling your server code can reduce startup times and improve running performance.
  - For generating bundles that are intended to be run by the Bun runtime. In many cases, it isn't necessary to bundle server-side code; you can directly execute the source code without modification. However, bundling your server code can reduce startup times and improve running performance. This is the target to use for building full-stack applications with build-time HTML imports, where both server and client code are bundled together.

All bundles generated with `target: "bun"` are marked with a special `// @bun` pragma, which indicates to the Bun runtime that there's no need to re-transpile the file before execution.
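A sketch of selecting this target from the `Bun.build()` API:

```ts
// Sketch: bundle server-side code for the Bun runtime.
await Bun.build({
  entrypoints: ["./server.ts"],
  outdir: "./dist",
  target: "bun", // output files are marked with the `// @bun` pragma
});
```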
@@ -262,6 +262,20 @@ Currently, the list of selectors is:
- `video[poster]`
- `video[src]`

{% callout %}

**HTML Loader Behavior in Different Contexts**

The `html` loader behaves differently depending on how it's used:

1. **Static Build:** When you run `bun build ./index.html`, Bun produces a static site with all assets bundled and hashed.

2. **Runtime:** When you run `bun run server.ts` (where `server.ts` imports an HTML file), Bun bundles assets on-the-fly during development, enabling features like hot module replacement.

3. **Full-stack Build:** When you run `bun build --target=bun server.ts` (where `server.ts` imports an HTML file), the import resolves to a manifest object that `Bun.serve` uses to efficiently serve pre-bundled assets in production.

{% /callout %}

### `sh` loader

**Bun Shell loader**. Default for `.sh` files
@@ -125,7 +125,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot

- `--target`
- n/a
- No supported. Bun's bundler performs no syntactic down-leveling at this time.
- Not supported. Bun's bundler performs no syntactic down-leveling at this time.

---
@@ -308,14 +308,12 @@ IF remote template

1. GET `registry.npmjs.org/@bun-examples/${template}/latest` and parse it
2. GET `registry.npmjs.org/@bun-examples/${template}/-/${template}-${latestVersion}.tgz`
3. Decompress & extract `${template}-${latestVersion}.tgz` into `${destination}`

   - If there are files that would overwrite, warn and exit unless `--force` is passed

IF GitHub repo

1. Download the tarball from GitHub’s API
2. Decompress & extract into `${destination}`

   - If there are files that would overwrite, warn and exit unless `--force` is passed

ELSE IF local template
@@ -333,7 +331,6 @@ ELSE IF local template

7. Run `${npmClient} install` unless `--no-install` is passed OR no dependencies are in package.json
8. Run any tasks defined in `"bun-create": { "postinstall" }` with the npm client
9. Run `git init; git add -A .; git commit -am "Initial Commit";`

   - Rename `gitignore` to `.gitignore`. NPM automatically removes `.gitignore` files from appearing in packages.
   - If there are dependencies, this runs in a separate thread concurrently while node_modules are being installed
   - Using libgit2 if available was tested and performed 3x slower in microbenchmarks
65 docs/cli/info.md Normal file
@@ -0,0 +1,65 @@
`bun info` displays package metadata from the npm registry.

## Usage

```bash
$ bun info react
```

This will display information about the `react` package, including its latest version, description, homepage, dependencies, and more.

## Viewing specific versions

To view information about a specific version:

```bash
$ bun info react@18.0.0
```

## Viewing specific properties

You can also query specific properties from the package metadata:

```bash
$ bun info react version
$ bun info react dependencies
$ bun info react repository.url
```

## JSON output

To get the output in JSON format, use the `--json` flag:

```bash
$ bun info react --json
```

## Alias

`bun pm view` is an alias for `bun info`:

```bash
$ bun pm view react # equivalent to: bun info react
```

## Examples

```bash
# View basic package information
$ bun info is-number

# View a specific version
$ bun info is-number@7.0.0

# View all available versions
$ bun info is-number versions

# View package dependencies
$ bun info express dependencies

# View package homepage
$ bun info lodash homepage

# Get JSON output
$ bun info react --json
```
@@ -223,7 +223,16 @@ For convenience, here are download links for the latest version:

The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.

Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
### CPU requirements and `baseline` builds

Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install script automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.

| Build        | Intel requirement                                                   | AMD requirement    |
| ------------ | ------------------------------------------------------------------- | ------------------ |
| x64          | Haswell (4th generation Core) or newer, except some low-end models  | Excavator or newer |
| x64-baseline | Nehalem (1st generation Core) or newer                              | Bulldozer or newer |

Bun does not currently support any CPUs older than the `baseline` target, which mandates the SSE4.2 extension.

Bun also publishes `darwin-x64-baseline` binaries, but these are just a copy of the `darwin-x64` ones so they still have the same CPU requirement. We only maintain these since some tools expect them to exist. Bun requires macOS 13.0 or later, which does not support any CPUs that don't meet our requirement.
@@ -76,7 +76,7 @@ The `define` field allows you to replace certain global identifiers with constan

### `loader`

Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun. If
Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun.

```toml
[loader]
@@ -382,6 +382,17 @@ registry = { url = "https://registry.npmjs.org", token = "123456" }
registry = "https://username:password@registry.npmjs.org"
```

### `install.linkWorkspacePackages`

To configure how workspace packages are linked, use the `install.linkWorkspacePackages` option. When `true` (the default), workspace packages are linked from the monorepo root into their respective `node_modules` directories.

```toml
[install]
linkWorkspacePackages = true
```

### `install.scopes`

To configure a registry for a particular scope (e.g. `@myorg/<package>`) use `install.scopes`. You can reference environment variables with `$variable` notation.
@@ -102,7 +102,7 @@ Once the plugin is registered, `.yaml` and `.yml` files can be directly imported

{% codetabs %}

```ts#index.ts
import data from "./data.yml"
import * as data from "./data.yml"

console.log(data);
```
@@ -206,13 +206,11 @@ Understanding how `mock.module()` works helps you use it more effectively:
2. **Lazy Evaluation**: The mock factory callback is only evaluated when the module is actually imported or required.

3. **Path Resolution**: Bun automatically resolves the module specifier as though you were doing an import, supporting:

   - Relative paths (`'./module'`)
   - Absolute paths (`'/path/to/module'`)
   - Package names (`'lodash'`)

4. **Import Timing Effects**:

   - When mocking before first import: No side effects from the original module occur
   - When mocking after import: The original module's side effects have already happened
   - For this reason, using `--preload` is recommended for mocks that need to prevent side effects (see the sketch below)

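A minimal sketch of such a preload mock (the module path and export are hypothetical); run tests with `bun test --preload ./preload.ts`:

```ts
// preload.ts — registered before any test imports the real module,
// so the original module's side effects never run.
import { mock } from "bun:test";

mock.module("./heavy-module", () => {
  return { compute: () => 42 };
});
```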
@@ -17,6 +17,7 @@ console.log(Bun.hash.xxHash3(input)); // bigint
console.log(Bun.hash.murmur32v3(input)); // number
console.log(Bun.hash.murmur32v2(input)); // number
console.log(Bun.hash.murmur64v2(input)); // bigint
console.log(Bun.hash.rapidhash(input)); // bigint

// Second argument accepts a seed where relevant
console.log(Bun.hash(input, 12345));
19 misctools/lldb/init.lldb Normal file
@@ -0,0 +1,19 @@
# This file is separate from .lldbinit because it has to be in the same directory as the Python
# modules in order for the "attach" action to work.

# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR

command script import -c lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std

command script import -c lldb_webkit.py

command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
  "version": "1.2.16",
  "version": "1.2.18",
  "workspaces": [
    "./packages/bun-types",
    "./packages/@types/bun"
@@ -24,8 +24,9 @@
  },
  "scripts": {
    "build": "bun run build:debug",
    "watch": "zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "watch-windows": "zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "ci": "bun scripts/buildkite-failures.ts ",
    "watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
    "bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",
    "build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
@@ -743,7 +743,7 @@ export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
source,
request,
// It is theoretically possible for a breakpoint to resolve to multiple locations.
// In that case, send a seperate `breakpoint` event for each one, excluding the first.
// In that case, send a separate `breakpoint` event for each one, excluding the first.
notify: i > 0,
}),
);
@@ -850,7 +850,7 @@ const Summary = ({ errorCount, onClose }: { errorCount: number; onClose: () => v

<a href="https://bun.sh/discord" target="_blank" className="BunError-Summary-help">
<svg width="18" viewBox="0 0 71 55" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<g clipPath="url(#clip0)">
<path
d="M60.1045 4.8978C55.5792 2.8214 50.7265 1.2916 45.6527 0.41542C45.5603 0.39851 45.468 0.440769 45.4204 0.525289C44.7963 1.6353 44.105 3.0834 43.6209 4.2216C38.1637 3.4046 32.7345 3.4046 27.3892 4.2216C26.905 3.0581 26.1886 1.6353 25.5617 0.525289C25.5141 0.443589 25.4218 0.40133 25.3294 0.41542C20.2584 1.2888 15.4057 2.8186 10.8776 4.8978C10.8384 4.9147 10.8048 4.9429 10.7825 4.9795C1.57795 18.7309 -0.943561 32.1443 0.293408 45.3914C0.299005 45.4562 0.335386 45.5182 0.385761 45.5576C6.45866 50.0174 12.3413 52.7249 18.1147 54.5195C18.2071 54.5477 18.305 54.5139 18.3638 54.4378C19.7295 52.5728 20.9469 50.6063 21.9907 48.5383C22.0523 48.4172 21.9935 48.2735 21.8676 48.2256C19.9366 47.4931 18.0979 46.6 16.3292 45.5858C16.1893 45.5041 16.1781 45.304 16.3068 45.2082C16.679 44.9293 17.0513 44.6391 17.4067 44.3461C17.471 44.2926 17.5606 44.2813 17.6362 44.3151C29.2558 49.6202 41.8354 49.6202 53.3179 44.3151C53.3935 44.2785 53.4831 44.2898 53.5502 44.3433C53.9057 44.6363 54.2779 44.9293 54.6529 45.2082C54.7816 45.304 54.7732 45.5041 54.6333 45.5858C52.8646 46.6197 51.0259 47.4931 49.0921 48.2228C48.9662 48.2707 48.9102 48.4172 48.9718 48.5383C50.038 50.6034 51.2554 52.5699 52.5959 54.435C52.6519 54.5139 52.7526 54.5477 52.845 54.5195C58.6464 52.7249 64.529 50.0174 70.6019 45.5576C70.6551 45.5182 70.6887 45.459 70.6943 45.3942C72.1747 30.0791 68.2147 16.7757 60.1968 4.9823C60.1772 4.9429 60.1437 4.9147 60.1045 4.8978ZM23.7259 37.3253C20.2276 37.3253 17.3451 34.1136 17.3451 30.1693C17.3451 26.225 20.1717 23.0133 23.7259 23.0133C27.308 23.0133 30.1626 26.2532 30.1066 30.1693C30.1066 34.1136 27.28 37.3253 23.7259 37.3253ZM47.3178 37.3253C43.8196 37.3253 40.9371 34.1136 40.9371 30.1693C40.9371 26.225 43.7636 23.0133 47.3178 23.0133C50.9 23.0133 53.7545 26.2532 53.6986 30.1693C53.6986 34.1136 50.9 37.3253 47.3178 37.3253Z"
fill="#5865F2"
@@ -72,6 +72,7 @@ async function buildRootModule(dryRun?: boolean) {
  },
});
write(join(cwd, "bin", "bun.exe"), "");
write(join(cwd, "bin", "bunx.exe"), "");
write(
  join(cwd, "bin", "README.txt"),
  `The 'bun.exe' file is a placeholder for the binary file, which
@@ -105,7 +106,7 @@ without *requiring* a postinstall script.
),
bin: {
  bun: "bin/bun.exe",
  bunx: "bin/bun.exe",
  bunx: "bin/bunx.exe",
},
os,
cpu,
@@ -157,3 +157,15 @@ export function exists(path: string): boolean {
  }
  return false;
}

export function link(path: string, newPath: string): void {
  debug("link", path, newPath);
  try {
    fs.unlinkSync(newPath);
    fs.linkSync(path, newPath);
    return;
  } catch (error) {
    copy(path, newPath);
    debug("fs.linkSync failed, reverting to copy", error);
  }
}
@@ -1,7 +1,7 @@
import { unzipSync } from "zlib";
import { debug, error } from "../console";
import { fetch } from "../fetch";
import { chmod, join, rename, rm, tmp, write } from "../fs";
import { chmod, join, link, rename, rm, tmp, write } from "../fs";
import type { Platform } from "../platform";
import { abi, arch, os, supportedPlatforms } from "../platform";
import { spawn } from "../spawn";
@@ -125,6 +125,7 @@ export function optimizeBun(path: string): void {
os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash";
try {
  rename(path, join(__dirname, "bin", "bun.exe"));
  link(join(__dirname, "bin", "bun.exe"), join(__dirname, "bin", "bunx.exe"));
  return;
} catch (error) {
  debug("optimizeBun failed", error);
@@ -65,13 +65,11 @@ Note: The order of references in `index.d.ts` is important - `bun.ns.d.ts` must

### Best Practices

1. **Type Safety**

   - Please use strict types instead of `any` where possible
   - Leverage TypeScript's type system features (generics, unions, etc.)
   - Document complex types with JSDoc comments

2. **Compatibility**

   - Use `Bun.__internal.UseLibDomIfAvailable<LibDomName extends string, OurType>` for types that might conflict with lib.dom.d.ts (see [`./fetch.d.ts`](./fetch.d.ts) for a real example)
   - `@types/node` often expects variables to always be defined (this was the biggest cause of most of the conflicts in the past!), so we use the `UseLibDomIfAvailable` type to make sure we don't overwrite `lib.dom.d.ts` but still provide Bun types while simultaneously declaring the variable exists (for Node to work) in the cases that we can.
186 packages/bun-types/bun.d.ts vendored
@@ -1126,6 +1126,7 @@ declare module "bun" {
 * This will be used by fetch() and Bun.connect() to avoid DNS lookups.
 *
 * @param hostname The hostname to prefetch
 * @param port The port to prefetch. Default is 443. Port helps distinguish between IPv6 vs IPv4-only connections.
 *
 * @example
 * ```js
@@ -1135,7 +1136,7 @@ declare module "bun" {
 * await fetch('https://example.com');
 * ```
 */
function prefetch(hostname: string): void;
function prefetch(hostname: string, port?: number): void;

/**
 * **Experimental API**
@@ -1865,6 +1866,9 @@ declare module "bun" {
murmur32v3: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
murmur32v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
murmur64v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
rapidhash: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
}

type JavaScriptLoader = "jsx" | "js" | "ts" | "tsx";
@@ -4922,7 +4924,7 @@ declare module "bun" {
 *
 * @param force Synchronously run the garbage collector
 */
function gc(force: boolean): void;
function gc(force?: boolean): void;

/**
 * JavaScriptCore engine's internal heap snapshot
@@ -5871,31 +5873,76 @@ declare module "bun" {
index: string;
}

/**
 * Represents a TCP or TLS socket connection used for network communication.
 * This interface provides methods for reading, writing, managing the connection state,
 * and handling TLS-specific features if applicable.
 *
 * Sockets are created using `Bun.connect()` or accepted by a `Bun.listen()` server.
 *
 * @category HTTP & Networking
 */
interface Socket<Data = undefined> extends Disposable {
  /**
   * Write `data` to the socket
   * Writes `data` to the socket. This method is unbuffered and non-blocking. This uses the `sendto(2)` syscall internally.
   *
   * @param data The data to write to the socket
   * @param byteOffset The offset in the buffer to start writing from (defaults to 0)
   * @param byteLength The number of bytes to write (defaults to the length of the buffer)
   * For optimal performance with multiple small writes, consider batching multiple
   * writes together into a single `socket.write()` call.
   *
   * When passed a string, `byteOffset` and `byteLength` refer to the UTF-8 offset, not the string character offset.
   * @param data The data to write. Can be a string (encoded as UTF-8), `ArrayBuffer`, `TypedArray`, or `DataView`.
   * @param byteOffset The offset in bytes within the buffer to start writing from. Defaults to 0. Ignored for strings.
   * @param byteLength The number of bytes to write from the buffer. Defaults to the remaining length of the buffer from the offset. Ignored for strings.
   * @returns The number of bytes written. Returns `-1` if the socket is closed or shutting down. Can return less than the input size if the socket's buffer is full (backpressure).
   * @example
   * ```ts
   * // Send a string
   * const bytesWritten = socket.write("Hello, world!\n");
   *
   * This is unbuffered as of Bun v0.2.2. That means individual write() calls
   * will be slow. In the future, Bun will buffer writes and flush them at the
   * end of the tick, when the event loop is idle, or sooner if the buffer is full.
   * // Send binary data
   * const buffer = new Uint8Array([0x01, 0x02, 0x03]);
   * socket.write(buffer);
   *
   * // Send part of a buffer
   * const largeBuffer = new Uint8Array(1024);
   * // ... fill largeBuffer ...
   * socket.write(largeBuffer, 100, 50); // Write 50 bytes starting from index 100
   * ```
   */
  write(data: string | BufferSource, byteOffset?: number, byteLength?: number): number;

  /**
   * The data context for the socket.
   * The user-defined data associated with this socket instance.
   * This can be set when the socket is created via `Bun.connect({ data: ... })`.
   * It can be read or updated at any time.
   *
   * @example
   * ```ts
   * // In a socket handler
   * function open(socket: Socket<{ userId: string }>) {
   *   console.log(`Socket opened for user: ${socket.data.userId}`);
   *   socket.data.lastActivity = Date.now(); // Update data
   * }
   * ```
   */
  data: Data;

  /**
   * Like {@link Socket.write} except it includes a TCP FIN packet
   * Sends the final data chunk and initiates a graceful shutdown of the socket's write side.
   * After calling `end()`, no more data can be written using `write()` or `end()`.
   * The socket remains readable until the remote end also closes its write side or the connection is terminated.
   * This sends a TCP FIN packet after writing the data.
   *
   * Use it to send your last message and close the connection.
   * @param data Optional final data to write before closing. Same types as `write()`.
   * @param byteOffset Optional offset for buffer data.
   * @param byteLength Optional length for buffer data.
   * @returns The number of bytes written for the final chunk. Returns `-1` if the socket was already closed or shutting down.
   * @example
   * ```ts
   * // send some data and close the write side
   * socket.end("Goodbye!");
   * // or close write side without sending final data
   * socket.end();
   * ```
   */
  end(data?: string | BufferSource, byteOffset?: number, byteLength?: number): number;

@@ -5922,20 +5969,33 @@ declare module "bun" {
  timeout(seconds: number): void;

  /**
   * Forcefully close the socket. The other end may not receive all data, and
   * the socket will be closed immediately.
   * Forcefully closes the socket connection immediately. This is an abrupt termination, unlike the graceful shutdown initiated by `end()`.
   * It uses `SO_LINGER` with `l_onoff=1` and `l_linger=0` before calling `close(2)`.
   * Consider using {@link close close()} or {@link end end()} for graceful shutdowns.
   *
   * This passes `SO_LINGER` with `l_onoff` set to `1` and `l_linger` set to
   * `0` and then calls `close(2)`.
   * @example
   * ```ts
   * socket.terminate();
   * ```
   */
  terminate(): void;

  /**
   * Shutdown writes to a socket
   * Shuts down the write-half or both halves of the connection.
   * This allows the socket to enter a half-closed state where it can still receive data
   * but can no longer send data (`halfClose = true`), or close both read and write
   * (`halfClose = false`, similar to `end()` but potentially more immediate depending on OS).
   * Calls `shutdown(2)` syscall internally.
   *
   * This makes the socket a half-closed socket. It can still receive data.
   * @param halfClose If `true`, only shuts down the write side (allows receiving). If `false` or omitted, shuts down both read and write. Defaults to `false`.
   * @example
   * ```ts
   * // Stop sending data, but allow receiving
   * socket.shutdown(true);
   *
   * This calls [shutdown(2)](https://man7.org/linux/man-pages/man2/shutdown.2.html) internally
   * // Shutdown both reading and writing
   * socket.shutdown();
   * ```
   */
  shutdown(halfClose?: boolean): void;

@@ -5961,6 +6021,11 @@ declare module "bun" {

  /**
   * Flush any buffered data to the socket
   * This attempts to send the data immediately, but success depends on the network conditions
   * and the receiving end.
   * It might be necessary after several `write` calls if immediate sending is critical,
   * though often the OS handles flushing efficiently. Note that `write` calls outside
   * `open`/`data`/`drain` might benefit from manual `cork`/`flush`.
   */
  flush(): void;

@@ -5982,17 +6047,31 @@ declare module "bun" {

  /**
   * Remote IP address connected to the socket
   * @example "192.168.1.100" | "2001:db8::1"
   */
  readonly remoteAddress: string;

  /**
   * Remote port connected to the socket
   * @example 8080
   */
  readonly remotePort: number;

  /**
   * IP protocol family used for the local endpoint of the socket
   * @example "IPv4" | "IPv6"
   */
  readonly localFamily: "IPv4" | "IPv6";

  /**
   * Local IP address connected to the socket
   * @example "192.168.1.100" | "2001:db8::1"
   */
  readonly localAddress: string;

  /**
   * local port connected to the socket
   * @example 8080
   */
  readonly localPort: number;

@@ -6156,6 +6235,8 @@ declare module "bun" {
  /**
   * See `Session Resumption` for more information.
   * @return `true` if the session was reused, `false` otherwise.
   * **TLS Only:** Checks if the current TLS session was resumed from a previous session.
   * Returns `true` if the session was resumed, `false` otherwise.
   */
  isSessionReused(): boolean;

@@ -6198,30 +6279,91 @@ declare module "bun" {
  setKeepAlive(enable?: boolean, initialDelay?: number): boolean;

  /**
   * The number of bytes written to the socket.
   * The total number of bytes successfully written to the socket since it was established.
   * This includes data currently buffered by the OS but not yet acknowledged by the remote peer.
   */
  readonly bytesWritten: number;

  /**
   * Alias for `socket.end()`. Allows the socket to be used with `using` declarations
   * for automatic resource management.
   * @example
   * ```ts
   * async function processSocket() {
   *   using socket = await Bun.connect({ ... });
   *   socket.write("Data");
   *   // socket.end() is called automatically when exiting the scope
   * }
   * ```
   */
  [Symbol.dispose](): void;

  resume(): void;

  pause(): void;

  /**
   * If this is a TLS Socket
   */
  renegotiate(): void;

  /**
   * Sets the verify mode of the socket.
   *
   * @param requestCert Whether to request a certificate.
   * @param rejectUnauthorized Whether to reject unauthorized certificates.
   */
  setVerifyMode(requestCert: boolean, rejectUnauthorized: boolean): void;

  getSession(): void;

  /**
   * Sets the session of the socket.
   *
   * @param session The session to set.
   */
  setSession(session: string | Buffer | BufferSource): void;

  /**
   * Exports the keying material of the socket.
   *
   * @param length The length of the keying material to export.
   * @param label The label of the keying material to export.
   * @param context The context of the keying material to export.
   */
  exportKeyingMaterial(length: number, label: string, context?: string | BufferSource): void;

  /**
   * Upgrades the socket to a TLS socket.
   *
   * @param options The options for the upgrade.
   * @returns A tuple containing the raw socket and the TLS socket.
   * @see {@link TLSUpgradeOptions}
   */
  upgradeTLS<Data>(options: TLSUpgradeOptions<Data>): [raw: Socket<Data>, tls: Socket<Data>];

  /**
   * Closes the socket.
   *
   * This is a wrapper around `end()` and `shutdown()`.
   *
   * @see {@link end}
   * @see {@link shutdown}
   */
  close(): void;

  /**
   * Returns the servername of the socket.
   *
   * @see {@link setServername}
   */
  getServername(): string;

  /**
   * Sets the servername of the socket.
   *
   * @see {@link getServername}
   */
  setServername(name: string): void;
}

@@ -6709,7 +6851,7 @@ declare module "bun" {
 * incoming messages, and `subprocess.send` can send messages to the subprocess. Messages are serialized
 * using the JSC serialize API, which allows for the same types that `postMessage`/`structuredClone` supports.
 *
 * The subprocess can send and recieve messages by using `process.send` and `process.on("message")`,
 * The subprocess can send and receive messages by using `process.send` and `process.on("message")`,
 * respectively. This is the same API as what Node.js exposes when `child_process.fork()` is used.
 *
 * Currently, this is only compatible with processes that are other `bun` instances.
@@ -11,7 +11,9 @@
"files": [
  "./*.d.ts",
  "docs/**/*.md",
  "docs/*.md"
  "docs/*.md",
  "CLAUDE.md",
  "README.md"
],
"homepage": "https://bun.sh",
"dependencies": {
@@ -23,7 +25,7 @@
"scripts": {
  "prebuild": "echo $(pwd)",
  "copy-docs": "rm -rf docs && cp -rL ../../docs/ ./docs && find ./docs -type f -name '*.md' -exec sed -i 's/\\$BUN_LATEST_VERSION/'\"${BUN_VERSION#bun-v}\"'/g' {} +",
  "build": "bun run copy-docs && bun scripts/build.ts",
  "build": "bun run copy-docs && cp ../../src/init/rule.md CLAUDE.md && bun scripts/build.ts",
  "test": "tsc",
  "fmt": "echo $(which biome) && biome format --write ."
},
168 packages/bun-types/redis.d.ts vendored
@@ -50,6 +50,10 @@ declare module "bun" {
|
||||
enableAutoPipelining?: boolean;
|
||||
}
|
||||
|
||||
export namespace RedisClient {
|
||||
type KeyLike = string | ArrayBufferView | Blob;
|
||||
}
|
||||
|
||||
export class RedisClient {
|
||||
/**
|
||||
* Creates a new Redis client
|
||||
@@ -112,14 +116,14 @@ declare module "bun" {
|
||||
* @param key The key to get
|
||||
* @returns Promise that resolves with the key's value as a string, or null if the key doesn't exist
|
||||
*/
|
||||
get(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
get(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key as a Uint8Array
|
||||
* @param key The key to get
|
||||
* @returns Promise that resolves with the key's value as a Uint8Array, or null if the key doesn't exist
|
||||
*/
|
||||
getBuffer(key: string | ArrayBufferView | Blob): Promise<Uint8Array<ArrayBuffer> | null>;
|
||||
getBuffer(key: RedisClient.KeyLike): Promise<Uint8Array<ArrayBuffer> | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value
|
||||
@@ -127,7 +131,7 @@ declare module "bun" {
|
||||
* @param value The value to set
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration
|
||||
@@ -136,12 +140,7 @@ declare module "bun" {
|
||||
* @param ex Set the specified expire time, in seconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
ex: "EX",
|
||||
seconds: number,
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, ex: "EX", seconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration
|
||||
@@ -150,12 +149,7 @@ declare module "bun" {
|
||||
* @param px Set the specified expire time, in milliseconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
px: "PX",
|
||||
milliseconds: number,
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, px: "PX", milliseconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration at a specific Unix timestamp
|
||||
@@ -164,12 +158,7 @@ declare module "bun" {
|
||||
* @param exat Set the specified Unix time at which the key will expire, in seconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
exat: "EXAT",
|
||||
timestampSeconds: number,
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, exat: "EXAT", timestampSeconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration at a specific Unix timestamp
|
||||
@@ -179,8 +168,8 @@ declare module "bun" {
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
key: RedisClient.KeyLike,
|
||||
value: RedisClient.KeyLike,
|
||||
pxat: "PXAT",
|
||||
timestampMilliseconds: number,
|
||||
): Promise<"OK">;
|
||||
@@ -192,7 +181,7 @@ declare module "bun" {
|
||||
* @param nx Only set the key if it does not already exist
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key already exists
|
||||
*/
|
||||
set(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob, nx: "NX"): Promise<"OK" | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, nx: "NX"): Promise<"OK" | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value only if key already exists
|
||||
@@ -201,7 +190,7 @@ declare module "bun" {
|
||||
* @param xx Only set the key if it already exists
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key does not exist
|
||||
*/
|
||||
set(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob, xx: "XX"): Promise<"OK" | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, xx: "XX"): Promise<"OK" | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value and return the old value
|
||||
@@ -210,11 +199,7 @@ declare module "bun" {
|
||||
* @param get Return the old string stored at key, or null if key did not exist
|
||||
* @returns Promise that resolves with the old value, or null if key did not exist
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
get: "GET",
|
||||
): Promise<string | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, get: "GET"): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value and retain the time to live
|
||||
@@ -223,11 +208,7 @@ declare module "bun" {
|
||||
* @param keepttl Retain the time to live associated with the key
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
keepttl: "KEEPTTL",
|
||||
): Promise<"OK">;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, keepttl: "KEEPTTL"): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with various options
|
||||
@@ -236,39 +217,35 @@ declare module "bun" {
|
||||
* @param options Array of options (EX, PX, EXAT, PXAT, NX, XX, KEEPTTL, GET)
|
||||
* @returns Promise that resolves with "OK" on success, null if NX/XX condition not met, or the old value if GET is specified
|
||||
*/
|
||||
set(
|
||||
key: string | ArrayBufferView | Blob,
|
||||
value: string | ArrayBufferView | Blob,
|
||||
...options: string[]
|
||||
): Promise<"OK" | string | null>;
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, ...options: string[]): Promise<"OK" | string | null>;
|
||||
|
||||
/**
|
||||
* Delete a key
|
||||
* @param key The key to delete
|
||||
* Delete a key(s)
|
||||
* @param keys The keys to delete
|
||||
* @returns Promise that resolves with the number of keys removed
|
||||
*/
|
||||
del(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
del(...keys: RedisClient.KeyLike[]): Promise<number>;
|
||||
|
||||
/**
|
||||
* Increment the integer value of a key by one
|
||||
* @param key The key to increment
|
||||
* @returns Promise that resolves with the new value
|
||||
*/
|
||||
incr(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
incr(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Decrement the integer value of a key by one
|
||||
* @param key The key to decrement
|
||||
* @returns Promise that resolves with the new value
|
||||
*/
|
||||
decr(key: string | ArrayBufferView | Blob): Promise<number>;
|
||||
decr(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Determine if a key exists
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with true if the key exists, false otherwise
|
||||
*/
|
||||
exists(key: string | ArrayBufferView | Blob): Promise<boolean>;
|
||||
exists(key: RedisClient.KeyLike): Promise<boolean>;
|
||||
|
||||
/**
* Set a key's time to live in seconds
@@ -276,14 +253,14 @@ declare module "bun" {
* @param seconds The number of seconds until expiration
* @returns Promise that resolves with 1 if the timeout was set, 0 if not
*/
expire(key: string | ArrayBufferView | Blob, seconds: number): Promise<number>;
expire(key: RedisClient.KeyLike, seconds: number): Promise<number>;

/**
* Get the time to live for a key in seconds
* @param key The key to get the TTL for
* @returns Promise that resolves with the TTL, -1 if no expiry, or -2 if key doesn't exist
*/
ttl(key: string | ArrayBufferView | Blob): Promise<number>;
ttl(key: RedisClient.KeyLike): Promise<number>;
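The expire/ttl pair in action (a sketch; the sentinel values -1 and -2 come from the doc comments above):

```ts
import { redis } from "bun";

await redis.set("otp:1234", "982133");
await redis.expire("otp:1234", 30);      // => 1 when the timeout was set
console.log(await redis.ttl("otp:1234")); // => seconds left; -1 = no expiry, -2 = missing key
```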
/**
* Set multiple hash fields to multiple values
@@ -291,7 +268,7 @@ declare module "bun" {
* @param fieldValues An array of alternating field names and values
* @returns Promise that resolves with "OK" on success
*/
hmset(key: string | ArrayBufferView | Blob, fieldValues: string[]): Promise<string>;
hmset(key: RedisClient.KeyLike, fieldValues: string[]): Promise<string>;

/**
* Get the values of all the given hash fields
@@ -299,7 +276,7 @@ declare module "bun" {
* @param fields The fields to get
* @returns Promise that resolves with an array of values
*/
hmget(key: string | ArrayBufferView | Blob, fields: string[]): Promise<Array<string | null>>;
hmget(key: RedisClient.KeyLike, fields: string[]): Promise<Array<string | null>>;
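Note the `fieldValues` shape: one flat array alternating field names and values, as a sketch shows (hypothetical hash key):

```ts
import { redis } from "bun";

await redis.hmset("user:1", ["name", "Ada", "lang", "en"]);
const [name, missing] = await redis.hmget("user:1", ["name", "nope"]); // => ["Ada", null]
```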
/**
* Check if a value is a member of a set
@@ -307,7 +284,7 @@ declare module "bun" {
* @param member The member to check
* @returns Promise that resolves with true if the member exists, false otherwise
*/
sismember(key: string | ArrayBufferView | Blob, member: string): Promise<boolean>;
sismember(key: RedisClient.KeyLike, member: string): Promise<boolean>;

/**
* Add a member to a set
@@ -315,7 +292,7 @@ declare module "bun" {
* @param member The member to add
* @returns Promise that resolves with 1 if the member was added, 0 if it already existed
*/
sadd(key: string | ArrayBufferView | Blob, member: string): Promise<number>;
sadd(key: RedisClient.KeyLike, member: string): Promise<number>;

/**
* Remove a member from a set
@@ -323,28 +300,28 @@ declare module "bun" {
* @param member The member to remove
* @returns Promise that resolves with 1 if the member was removed, 0 if it didn't exist
*/
srem(key: string | ArrayBufferView | Blob, member: string): Promise<number>;
srem(key: RedisClient.KeyLike, member: string): Promise<number>;

/**
* Get all the members in a set
* @param key The set key
* @returns Promise that resolves with an array of all members
*/
smembers(key: string | ArrayBufferView | Blob): Promise<string[]>;
smembers(key: RedisClient.KeyLike): Promise<string[]>;

/**
* Get a random member from a set
* @param key The set key
* @returns Promise that resolves with a random member, or null if the set is empty
*/
srandmember(key: string | ArrayBufferView | Blob): Promise<string | null>;
srandmember(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Remove and return a random member from a set
* @param key The set key
* @returns Promise that resolves with the removed member, or null if the set is empty
*/
spop(key: string | ArrayBufferView | Blob): Promise<string | null>;
spop(key: RedisClient.KeyLike): Promise<string | null>;
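The set commands above, as one small sketch (set member order is not guaranteed; key names are illustrative):

```ts
import { redis } from "bun";

await redis.sadd("tags", "zig");
await redis.sadd("tags", "c++");
console.log(await redis.sismember("tags", "zig")); // => true
console.log(await redis.smembers("tags"));          // => ["zig", "c++"] in some order
await redis.srem("tags", "c++");
const peeked = await redis.srandmember("tags");     // random member, not removed
const popped = await redis.spop("tags");            // random member, removed
```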
/**
* Increment the integer value of a hash field by the given number
@@ -353,7 +330,7 @@ declare module "bun" {
* @param increment The amount to increment by
* @returns Promise that resolves with the new value
*/
hincrby(key: string | ArrayBufferView | Blob, field: string, increment: string | number): Promise<number>;
hincrby(key: RedisClient.KeyLike, field: string, increment: string | number): Promise<number>;

/**
* Increment the float value of a hash field by the given amount
@@ -362,35 +339,35 @@ declare module "bun" {
* @param increment The amount to increment by
* @returns Promise that resolves with the new value as a string
*/
hincrbyfloat(key: string | ArrayBufferView | Blob, field: string, increment: string | number): Promise<string>;
hincrbyfloat(key: RedisClient.KeyLike, field: string, increment: string | number): Promise<string>;

/**
* Get all the fields and values in a hash
* @param key The hash key
* @returns Promise that resolves with an object containing all fields and values
*/
hgetall(key: string | ArrayBufferView | Blob): Promise<Record<string, string> | null>;
hgetall(key: RedisClient.KeyLike): Promise<Record<string, string> | null>;

/**
* Get all field names in a hash
* @param key The hash key
* @returns Promise that resolves with an array of field names
*/
hkeys(key: string | ArrayBufferView | Blob): Promise<string[]>;
hkeys(key: RedisClient.KeyLike): Promise<string[]>;

/**
* Get the number of fields in a hash
* @param key The hash key
* @returns Promise that resolves with the number of fields
*/
hlen(key: string | ArrayBufferView | Blob): Promise<number>;
hlen(key: RedisClient.KeyLike): Promise<number>;

/**
* Get all values in a hash
* @param key The hash key
* @returns Promise that resolves with an array of values
*/
hvals(key: string | ArrayBufferView | Blob): Promise<string[]>;
hvals(key: RedisClient.KeyLike): Promise<string[]>;
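Note the return-type asymmetry above: `hincrby` resolves with a number, while `hincrbyfloat` resolves with a string (Redis returns floats as strings). A sketch with a hypothetical hash:

```ts
import { redis } from "bun";

await redis.hmset("cart:42", ["qty", "1", "price", "9.99"]);
await redis.hincrby("cart:42", "qty", 2);             // => 3 (number)
await redis.hincrbyfloat("cart:42", "price", "0.01"); // => "10" (string)
console.log(await redis.hgetall("cart:42")); // => { qty: "3", price: "10" }
console.log(await redis.hkeys("cart:42"));   // => ["qty", "price"]
console.log(await redis.hlen("cart:42"));    // => 2
```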
/**
* Find all keys matching the given pattern
@@ -404,84 +381,84 @@ declare module "bun" {
* @param key The list key
* @returns Promise that resolves with the length of the list
*/
llen(key: string | ArrayBufferView | Blob): Promise<number>;
llen(key: RedisClient.KeyLike): Promise<number>;

/**
* Remove and get the first element in a list
* @param key The list key
* @returns Promise that resolves with the first element, or null if the list is empty
*/
lpop(key: string | ArrayBufferView | Blob): Promise<string | null>;
lpop(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Remove the expiration from a key
* @param key The key to persist
* @returns Promise that resolves with 1 if the timeout was removed, 0 if the key doesn't exist or has no timeout
*/
persist(key: string | ArrayBufferView | Blob): Promise<number>;
persist(key: RedisClient.KeyLike): Promise<number>;

/**
* Get the expiration time of a key as a UNIX timestamp in milliseconds
* @param key The key to check
* @returns Promise that resolves with the timestamp, or -1 if the key has no expiration, or -2 if the key doesn't exist
*/
pexpiretime(key: string | ArrayBufferView | Blob): Promise<number>;
pexpiretime(key: RedisClient.KeyLike): Promise<number>;

/**
* Get the time to live for a key in milliseconds
* @param key The key to check
* @returns Promise that resolves with the TTL in milliseconds, or -1 if the key has no expiration, or -2 if the key doesn't exist
*/
pttl(key: string | ArrayBufferView | Blob): Promise<number>;
pttl(key: RedisClient.KeyLike): Promise<number>;

/**
* Remove and get the last element in a list
* @param key The list key
* @returns Promise that resolves with the last element, or null if the list is empty
*/
rpop(key: string | ArrayBufferView | Blob): Promise<string | null>;
rpop(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Get the number of members in a set
* @param key The set key
* @returns Promise that resolves with the cardinality (number of elements) of the set
*/
scard(key: string | ArrayBufferView | Blob): Promise<number>;
scard(key: RedisClient.KeyLike): Promise<number>;

/**
* Get the length of the value stored in a key
* @param key The key to check
* @returns Promise that resolves with the length of the string value, or 0 if the key doesn't exist
*/
strlen(key: string | ArrayBufferView | Blob): Promise<number>;
strlen(key: RedisClient.KeyLike): Promise<number>;

/**
* Get the number of members in a sorted set
* @param key The sorted set key
* @returns Promise that resolves with the cardinality (number of elements) of the sorted set
*/
zcard(key: string | ArrayBufferView | Blob): Promise<number>;
zcard(key: RedisClient.KeyLike): Promise<number>;

/**
* Remove and return members with the highest scores in a sorted set
* @param key The sorted set key
* @returns Promise that resolves with the removed member and its score, or null if the set is empty
*/
zpopmax(key: string | ArrayBufferView | Blob): Promise<string | null>;
zpopmax(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Remove and return members with the lowest scores in a sorted set
* @param key The sorted set key
* @returns Promise that resolves with the removed member and its score, or null if the set is empty
*/
zpopmin(key: string | ArrayBufferView | Blob): Promise<string | null>;
zpopmin(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Get one or multiple random members from a sorted set
* @param key The sorted set key
* @returns Promise that resolves with a random member, or null if the set is empty
*/
zrandmember(key: string | ArrayBufferView | Blob): Promise<string | null>;
zrandmember(key: RedisClient.KeyLike): Promise<string | null>;
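A sorted-set sketch for the zcard/zpopmax/zpopmin trio. `zadd` is not among the typed methods shown in this diff, so this goes through a generic `send(command, args)` escape hatch — treat that call as an assumption about the client rather than confirmed API:

```ts
import { redis } from "bun";

// Assumed: send() forwards a raw command when no typed wrapper exists
await redis.send("ZADD", ["scores", "100", "alice", "85", "bob"]);

console.log(await redis.zcard("scores")); // => 2
const top = await redis.zpopmax("scores");    // highest-scored member
const bottom = await redis.zpopmin("scores"); // lowest-scored member
```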
/**
* Append a value to a key
@@ -489,7 +466,7 @@ declare module "bun" {
* @param value The value to append
* @returns Promise that resolves with the length of the string after the append operation
*/
append(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
append(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;

/**
* Set the value of a key and return its old value
@@ -497,7 +474,7 @@ declare module "bun" {
* @param value The value to set
* @returns Promise that resolves with the old value, or null if the key didn't exist
*/
getset(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<string | null>;
getset(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<string | null>;

/**
* Prepend one or multiple values to a list
@@ -505,7 +482,7 @@ declare module "bun" {
* @param value The value to prepend
* @returns Promise that resolves with the length of the list after the push operation
*/
lpush(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
lpush(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;

/**
* Prepend a value to a list, only if the list exists
@@ -513,7 +490,7 @@ declare module "bun" {
* @param value The value to prepend
* @returns Promise that resolves with the length of the list after the push operation, or 0 if the list doesn't exist
*/
lpushx(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
lpushx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;

/**
* Add one or more members to a HyperLogLog
@@ -521,7 +498,7 @@ declare module "bun" {
* @param element The element to add
* @returns Promise that resolves with 1 if the HyperLogLog was altered, 0 otherwise
*/
pfadd(key: string | ArrayBufferView | Blob, element: string): Promise<number>;
pfadd(key: RedisClient.KeyLike, element: string): Promise<number>;

/**
* Append one or multiple values to a list
@@ -529,7 +506,7 @@ declare module "bun" {
* @param value The value to append
* @returns Promise that resolves with the length of the list after the push operation
*/
rpush(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
rpush(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;

/**
* Append a value to a list, only if the list exists
@@ -537,7 +514,7 @@ declare module "bun" {
* @param value The value to append
* @returns Promise that resolves with the length of the list after the push operation, or 0 if the list doesn't exist
*/
rpushx(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
rpushx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;

/**
* Set the value of a key, only if the key does not exist
@@ -545,7 +522,7 @@ declare module "bun" {
* @param value The value to set
* @returns Promise that resolves with 1 if the key was set, 0 if the key was not set
*/
setnx(key: string | ArrayBufferView | Blob, value: string | ArrayBufferView | Blob): Promise<number>;
setnx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
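A list/queue sketch tying the push and pop overloads together, plus `setnx` as a simple lock primitive (key names hypothetical):

```ts
import { redis } from "bun";

await redis.rpush("queue", "job1");
await redis.rpush("queue", "job2");
await redis.lpush("queue", "urgent");
console.log(await redis.llen("queue")); // => 3
console.log(await redis.lpop("queue")); // => "urgent"

// setnx only writes when the key is absent — resolves with 1/0
await redis.setnx("lock:report", "worker-1"); // => 1
await redis.setnx("lock:report", "worker-2"); // => 0, value stays "worker-1"
```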
/**
* Get the score associated with the given member in a sorted set
@@ -553,49 +530,62 @@ declare module "bun" {
* @param member The member to get the score for
* @returns Promise that resolves with the score of the member as a string, or null if the member or key doesn't exist
*/
zscore(key: string | ArrayBufferView | Blob, member: string): Promise<string | null>;
zscore(key: RedisClient.KeyLike, member: string): Promise<string | null>;

/**
* Get the values of all specified keys
* @param keys The keys to get
* @returns Promise that resolves with an array of values, with null for keys that don't exist
*/
mget(...keys: (string | ArrayBufferView | Blob)[]): Promise<(string | null)[]>;
mget(...keys: RedisClient.KeyLike[]): Promise<(string | null)[]>;

/**
* Count the number of set bits (population counting) in a string
* @param key The key to count bits in
* @returns Promise that resolves with the number of bits set to 1
*/
bitcount(key: string | ArrayBufferView | Blob): Promise<number>;
bitcount(key: RedisClient.KeyLike): Promise<number>;

/**
* Return a serialized version of the value stored at the specified key
* @param key The key to dump
* @returns Promise that resolves with the serialized value, or null if the key doesn't exist
*/
dump(key: string | ArrayBufferView | Blob): Promise<string | null>;
dump(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Get the expiration time of a key as a UNIX timestamp in seconds
* @param key The key to check
* @returns Promise that resolves with the timestamp, or -1 if the key has no expiration, or -2 if the key doesn't exist
*/
expiretime(key: string | ArrayBufferView | Blob): Promise<number>;
expiretime(key: RedisClient.KeyLike): Promise<number>;

/**
* Get the value of a key and delete the key
* @param key The key to get and delete
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getdel(key: string | ArrayBufferView | Blob): Promise<string | null>;
getdel(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Get the value of a key and optionally set its expiration
* @param key The key to get
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: string | ArrayBufferView | Blob): Promise<string | null>;
getex(key: RedisClient.KeyLike): Promise<string | null>;

/**
* Ping the server
* @returns Promise that resolves with "PONG" if the server is reachable, or throws an error if the server is not reachable
*/
ping(): Promise<"PONG">;

/**
* Ping the server with a message
* @param message The message to send to the server
* @returns Promise that resolves with the message if the server is reachable, or throws an error if the server is not reachable
*/
ping(message: RedisClient.KeyLike): Promise<string>;
}
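Closing out the interface, a sketch of the batch/utility commands (key names hypothetical; assumes a reachable server, since `ping` rejects otherwise):

```ts
import { redis } from "bun";

await redis.set("a", "1");
console.log(await redis.mget("a", "b")); // => ["1", null]
console.log(await redis.bitcount("a"));  // bits set to 1 in the stored string
console.log(await redis.ping());         // => "PONG"
console.log(await redis.ping("hello"));  // => "hello" (echoed back)
```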
/**

@@ -4,4 +4,16 @@ import pkg from "../package.json";

const BUN_VERSION = (process.env.BUN_VERSION || Bun.version || process.versions.bun).replace(/^.*v/, "");

Bun.write(join(import.meta.dir, "..", "package.json"), JSON.stringify({ version: BUN_VERSION, ...pkg }, null, 2));
let claude = Bun.file(join(import.meta.dir, "..", "CLAUDE.md"));
if (await claude.exists()) {
let original = await claude.text();
const endOfFrontMatter = original.lastIndexOf("---\n");
original = original.replaceAll("node_modules/bun-types/", "");
if (endOfFrontMatter > -1) {
original = original.slice(endOfFrontMatter + "---\n".length).trim() + "\n";
}

await claude.write(original);
}

await Bun.write(join(import.meta.dir, "..", "package.json"), JSON.stringify({ version: BUN_VERSION, ...pkg }, null, 2));
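The hunk above now also strips YAML front matter from CLAUDE.md before rewriting package.json. A standalone sketch of that front-matter trick, with hypothetical file contents — everything up to and including the last `---\n` marker is discarded:

```ts
const original = "---\ntitle: Docs\n---\n# Heading\nBody text\n";
const marker = "---\n";
const end = original.lastIndexOf(marker);
// Keep only what follows the closing marker; fall back to the whole text
const body = end > -1 ? original.slice(end + marker.length).trim() + "\n" : original;
console.log(body); // "# Heading\nBody text\n"
```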
79
packages/bun-types/sqlite.d.ts
vendored
@@ -764,6 +764,79 @@ declare module "bun:sqlite" {
*/
readonly paramsCount: number;

/**
* The actual SQLite column types from the first row of the result set.
* Useful for expressions and computed columns, which are not covered by `declaredTypes`
*
* Returns an array of SQLite type constants as uppercase strings:
* - `"INTEGER"` for integer values
* - `"FLOAT"` for floating-point values
* - `"TEXT"` for text values
* - `"BLOB"` for binary data
* - `"NULL"` for null values
* - `null` for unknown/unsupported types
*
* **Requirements:**
* - Only available for read-only statements (SELECT queries)
* - For non-read-only statements, throws an error
*
* **Behavior:**
* - Uses `sqlite3_column_type()` to get actual data types from the first row
* - Returns `null` for columns with unknown SQLite type constants
*
* @example
* ```ts
* const stmt = db.prepare("SELECT id, name, age FROM users WHERE id = 1");
*
* console.log(stmt.columnTypes);
* // => ["INTEGER", "TEXT", "INTEGER"]
*
* // For expressions:
* const exprStmt = db.prepare("SELECT length('bun') AS str_length");
* console.log(exprStmt.columnTypes);
* // => ["INTEGER"]
* ```
*
* @throws Error if statement is not read-only (INSERT, UPDATE, DELETE, etc.)
* @since Bun v1.2.13
*/
readonly columnTypes: Array<"INTEGER" | "FLOAT" | "TEXT" | "BLOB" | "NULL" | null>;

/**
* The declared column types from the table schema.
*
* Returns an array of declared type strings from `sqlite3_column_decltype()`:
* - Raw type strings as declared in the CREATE TABLE statement
* - `null` for columns without declared types (e.g., expressions, computed columns)
*
* **Requirements:**
* - Statement must be executed at least once before accessing this property
* - Available for both read-only and read-write statements
*
* **Behavior:**
* - Uses `sqlite3_column_decltype()` to get schema-declared types
* - Returns the exact type string from the table definition
*
* @example
* ```ts
* // For table columns:
* const stmt = db.prepare("SELECT id, name, weight FROM products");
* stmt.get();
* console.log(stmt.declaredTypes);
* // => ["INTEGER", "TEXT", "REAL"]
*
* // For expressions (no declared types):
* const exprStmt = db.prepare("SELECT length('bun') AS str_length");
* exprStmt.get();
* console.log(exprStmt.declaredTypes);
* // => [null]
* ```
*
* @throws Error if statement hasn't been executed
* @since Bun v1.2.13
*/
readonly declaredTypes: Array<string | null>;

/**
* Finalize the prepared statement, freeing the resources used by the
* statement and preventing it from being executed again.
@@ -840,6 +913,12 @@ declare module "bun:sqlite" {
* Native object representing the underlying `sqlite3_stmt`
*
* This is left untyped because the ABI of the native bindings may change at any time.
*
* For stable, typed access to statement metadata, use the typed properties on the Statement class:
* - {@link columnNames} for column names
* - {@link paramsCount} for parameter count
* - {@link columnTypes} for actual data types from the first row
* - {@link declaredTypes} for schema-declared column types
*/
readonly native: any;
}
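Taken together, the two new properties answer different questions: `declaredTypes` reflects the schema, `columnTypes` reflects the first row actually produced. A combined sketch using an in-memory database:

```ts
import { Database } from "bun:sqlite";

const db = new Database(":memory:");
db.run("CREATE TABLE products (id INTEGER, name TEXT, weight REAL)");
db.run("INSERT INTO products VALUES (1, 'anvil', 12.5)");

const stmt = db.prepare("SELECT id, name, weight, length(name) AS n FROM products");
stmt.get(); // declaredTypes requires the statement to have run at least once

console.log(stmt.declaredTypes); // => ["INTEGER", "TEXT", "REAL", null] — schema view
console.log(stmt.columnTypes);   // => ["INTEGER", "TEXT", "FLOAT", "INTEGER"] — first-row view
```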
20
packages/bun-types/test.d.ts
vendored
@@ -88,15 +88,19 @@ declare module "bun:test" {
*/
export function setSystemTime(now?: Date | number): ThisType<void>;

interface Jest {
restoreAllMocks(): void;
clearAllMocks(): void;
fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
setSystemTime(now?: number | Date): void;
setTimeout(milliseconds: number): void;
}
export const jest: Jest;
export namespace jest {
function restoreAllMocks(): void;
function clearAllMocks(): void;
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
function setSystemTime(now?: number | Date): void;
function setTimeout(milliseconds: number): void;
function useFakeTimers(): void;
function useRealTimers(): void;
function spyOn<T extends object, K extends keyof T>(
obj: T,
methodOrPropertyValue: K,
): Mock<Extract<T[K], (...args: any[]) => any>>;

/**
* Constructs the type of a mock function, e.g. the return type of `jest.fn()`.
*/
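The namespace form mirrors Jest's globals and adds the timer helpers declared above. A short sketch of freezing the clock in a test:

```ts
import { jest, test, expect } from "bun:test";

test("system time can be frozen", () => {
  jest.useFakeTimers();
  jest.setSystemTime(new Date("2020-01-01T00:00:00Z"));
  expect(new Date().getFullYear()).toBe(2020);
  jest.useRealTimers(); // restore the real clock for later tests
});
```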
@@ -23,7 +23,6 @@
#ifndef _WIN32
#include <arpa/inet.h>
#endif

#define CONCURRENT_CONNECTIONS 4

// clang-format off
@@ -43,17 +42,20 @@ int us_raw_root_certs(struct us_cert_string_t**out){

void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) {
/* us_listen_socket_t extends us_socket_t so we close in similar ways */
if (!us_socket_is_closed(0, &ls->s)) {
us_internal_socket_context_unlink_listen_socket(ssl, ls->s.context, ls);
us_poll_stop((struct us_poll_t *) &ls->s, ls->s.context->loop);
bsd_close_socket(us_poll_fd((struct us_poll_t *) &ls->s));
struct us_socket_t* s = &ls->s;
if (!us_socket_is_closed(0, s)) {
struct us_socket_context_t* context = s->context;
struct us_loop_t* loop = context->loop;
us_internal_socket_context_unlink_listen_socket(ssl, context, ls);
us_poll_stop((struct us_poll_t *) s, loop);
bsd_close_socket(us_poll_fd((struct us_poll_t *) s));

/* Link this socket to the close-list and let it be deleted after this iteration */
ls->s.next = ls->s.context->loop->data.closed_head;
ls->s.context->loop->data.closed_head = &ls->s;
s->next = loop->data.closed_head;
loop->data.closed_head = s;

/* Any socket with prev = context is marked as closed */
ls->s.prev = (struct us_socket_t *) ls->s.context;
s->prev = (struct us_socket_t *) context;
}

/* We cannot immediately free a listen socket as we can be inside an accept loop */
@@ -91,16 +93,18 @@ void us_internal_socket_context_unlink_listen_socket(int ssl, struct us_socket_c
context->iterator = ls->s.next;
}

if (ls->s.prev == ls->s.next) {
struct us_socket_t* prev = ls->s.prev;
struct us_socket_t* next = ls->s.next;
if (prev == next) {
context->head_listen_sockets = 0;
} else {
if (ls->s.prev) {
ls->s.prev->next = ls->s.next;
if (prev) {
prev->next = next;
} else {
context->head_listen_sockets = (struct us_listen_socket_t *) ls->s.next;
context->head_listen_sockets = (struct us_listen_socket_t *) next;
}
if (ls->s.next) {
ls->s.next->prev = ls->s.prev;
if (next) {
next->prev = prev;
}
}
us_socket_context_unref(ssl, context);
@@ -112,31 +116,35 @@ void us_internal_socket_context_unlink_socket(int ssl, struct us_socket_context_
context->iterator = s->next;
}

if (s->prev == s->next) {
struct us_socket_t* prev = s->prev;
struct us_socket_t* next = s->next;
if (prev == next) {
context->head_sockets = 0;
} else {
if (s->prev) {
s->prev->next = s->next;
if (prev) {
prev->next = next;
} else {
context->head_sockets = s->next;
context->head_sockets = next;
}
if (s->next) {
s->next->prev = s->prev;
if (next) {
next->prev = prev;
}
}
us_socket_context_unref(ssl, context);
}
void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
if (c->prev_pending == c->next_pending) {
struct us_connecting_socket_t* prev = c->prev_pending;
struct us_connecting_socket_t* next = c->next_pending;
if (prev == next) {
context->head_connecting_sockets = 0;
} else {
if (c->prev_pending) {
c->prev_pending->next_pending = c->next_pending;
if (prev) {
prev->next_pending = next;
} else {
context->head_connecting_sockets = c->next_pending;
context->head_connecting_sockets = next;
}
if (c->next_pending) {
c->next_pending->prev_pending = c->prev_pending;
if (next) {
next->prev_pending = prev;
}
}
us_socket_context_unref(ssl, context);
@@ -144,11 +152,12 @@ void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_sock

/* We always add in the top, so we don't modify any s.next */
void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
ls->s.context = context;
ls->s.next = (struct us_socket_t *) context->head_listen_sockets;
ls->s.prev = 0;
struct us_socket_t* s = &ls->s;
s->context = context;
s->next = (struct us_socket_t *) context->head_listen_sockets;
s->prev = 0;
if (context->head_listen_sockets) {
context->head_listen_sockets->s.prev = &ls->s;
context->head_listen_sockets->s.prev = s;
}
context->head_listen_sockets = ls;
us_socket_context_ref(0, context);
@@ -366,15 +375,15 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
us_poll_start(p, context->loop, LIBUS_SOCKET_READABLE);
struct us_listen_socket_t *ls = (struct us_listen_socket_t *) p;

ls->s.context = context;
ls->s.timeout = 255;
ls->s.long_timeout = 255;
ls->s.flags.low_prio_state = 0;
ls->s.flags.is_paused = 0;
ls->s.flags.is_ipc = 0;
ls->s.next = 0;
ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
struct us_socket_t* s = &ls->s;
s->context = context;
s->timeout = 255;
s->long_timeout = 255;
s->flags.low_prio_state = 0;
s->flags.is_paused = 0;
s->flags.is_ipc = 0;
s->next = 0;
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
us_internal_socket_context_link_listen_socket(context, ls);

ls->socket_ext_size = socket_ext_size;
@@ -400,15 +409,16 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
us_poll_start(p, context->loop, LIBUS_SOCKET_READABLE);

struct us_listen_socket_t *ls = (struct us_listen_socket_t *) p;
ls->s.connect_state = NULL;
ls->s.context = context;
ls->s.timeout = 255;
ls->s.long_timeout = 255;
ls->s.flags.low_prio_state = 0;
ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
ls->s.flags.is_paused = 0;
ls->s.flags.is_ipc = 0;
ls->s.next = 0;
struct us_socket_t* s = &ls->s;
s->connect_state = NULL;
s->context = context;
s->timeout = 255;
s->long_timeout = 255;
s->flags.low_prio_state = 0;
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
s->flags.is_paused = 0;
s->flags.is_ipc = 0;
s->next = 0;
us_internal_socket_context_link_listen_socket(context, ls);

ls->socket_ext_size = socket_ext_size;
@@ -504,7 +514,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
}

struct addrinfo_request* ai_req;
if (Bun__addrinfo_get(loop, host, &ai_req) == 0) {
if (Bun__addrinfo_get(loop, host, (uint16_t)port, &ai_req) == 0) {
// fast path for cached results
struct addrinfo_result *result = Bun__addrinfo_getRequestResult(ai_req);
// fast failure path
@@ -515,9 +525,10 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
}

// if there is only one result we can immediately connect
if (result->entries && result->entries->info.ai_next == NULL) {
struct addrinfo_result_entry* entries = result->entries;
if (entries && entries->info.ai_next == NULL) {
struct sockaddr_storage addr;
init_addr_with_port(&result->entries->info, port, &addr);
init_addr_with_port(&entries->info, port, &addr);
*has_dns_resolved = 1;
struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
Bun__addrinfo_freeRequest(ai_req, s == NULL);
@@ -557,17 +568,19 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
}
++opened;
bsd_socket_nodelay(connect_socket_fd, 1);

struct us_socket_t *s = (struct us_socket_t *)us_create_poll(c->context->loop, 0, sizeof(struct us_socket_t) + c->socket_ext_size);
s->context = c->context;
struct us_loop_t* loop = c->context->loop;
struct us_socket_context_t* context = c->context;
struct us_socket_t *s = (struct us_socket_t *)us_create_poll(loop, 0, sizeof(struct us_socket_t) + c->socket_ext_size);
s->context = context;
s->timeout = c->timeout;
s->long_timeout = c->long_timeout;
s->flags.low_prio_state = 0;
s->flags.allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
s->flags.is_paused = 0;
s->flags.is_ipc = 0;
struct us_socket_flags* flags = &s->flags;
flags->low_prio_state = 0;
flags->allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
flags->is_paused = 0;
flags->is_ipc = 0;
/* Link it into context so that timeout fires properly */
us_internal_socket_context_link_socket(s->context, s);
us_internal_socket_context_link_socket(context, s);

// TODO check this, specifically how it interacts with the SSL code
// does this work when we create multiple sockets at once? will we need multiple SSL contexts?
@@ -579,10 +592,10 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
c->connecting_head = s;

s->connect_state = c;

struct us_poll_t* poll = &s->p;
/* Connect sockets are semi-sockets just like listen sockets */
us_poll_init(&s->p, connect_socket_fd, POLL_TYPE_SEMI_SOCKET);
us_poll_start(&s->p, s->context->loop, LIBUS_SOCKET_WRITABLE);
us_poll_init(poll, connect_socket_fd, POLL_TYPE_SEMI_SOCKET);
us_poll_start(poll, loop, LIBUS_SOCKET_WRITABLE);
}
return opened;
}
@@ -774,42 +787,50 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
if (us_socket_is_closed(ssl, s) || us_socket_is_shut_down(ssl, s)) {
return s;
}

struct us_socket_context_t *old_context = s->context;
struct us_loop_t *loop = old_context->loop;
/* We need to be sure that we are still holding a reference */
us_socket_context_ref(ssl, old_context);
if (s->flags.low_prio_state != 1) {
/* We need to be sure that we are still holding a reference */
us_socket_context_ref(ssl, context);
/* This properly updates the iterator if in on_timeout */
us_internal_socket_context_unlink_socket(ssl, s->context, s);
us_internal_socket_context_unlink_socket(ssl, old_context, s);
} else {
/* We manually ref/unref context to handle context life cycle with low-priority queue */
us_socket_context_unref(ssl, old_context);
}

struct us_connecting_socket_t *c = s->connect_state;

struct us_socket_t *new_s = s;

if (ext_size != -1) {
new_s = (struct us_socket_t *) us_poll_resize(&s->p, s->context->loop, sizeof(struct us_socket_t) + ext_size);
struct us_poll_t *pool_ref = &s->p;

new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + ext_size);
if (c) {
c->connecting_head = new_s;
struct us_socket_context_t *old_context = s->context;
c->context = context;
us_internal_socket_context_link_connecting_socket(ssl, context, c);
us_internal_socket_context_unlink_connecting_socket(ssl, old_context, c);
us_internal_socket_context_link_connecting_socket(ssl, context, c);
}
}
new_s->context = context;
new_s->timeout = 255;
new_s->long_timeout = 255;

if (new_s->flags.low_prio_state == 1) {
/* update pointers in low-priority queue */
if (!new_s->prev) new_s->context->loop->data.low_prio_head = new_s;
if (!new_s->prev) loop->data.low_prio_head = new_s;
else new_s->prev->next = new_s;

if (new_s->next) new_s->next->prev = new_s;
/* We manually ref/unref context to handle context life cycle with low-priority queue */
us_socket_context_ref(ssl, context);
} else {
us_internal_socket_context_link_socket(context, new_s);
us_socket_context_unref(ssl, context);
}

/* We can safely unref the old context here, which can potentially be freed */
us_socket_context_unref(ssl, old_context);
return new_s;
}

@@ -44,10 +44,7 @@ void *sni_find(void *sni, const char *hostname);
#include <wolfssl/options.h>
#endif

#include "./root_certs.h"

/* These are in root_certs.cpp */
extern X509_STORE *us_get_default_ca_store();
#include "./root_certs_header.h"

struct loop_ssl_data {
char *ssl_read_input, *ssl_read_output;

@@ -1,10 +1,9 @@
// MSVC doesn't support C11 stdatomic.h properly yet,
// so we use C++ std::atomic instead.
#include "./root_certs.h"
#include "./root_certs_header.h"
#include "./internal/internal.h"
#include <atomic>
#include <openssl/pem.h>
#include <openssl/x509.h>
#include <string.h>
static const int root_certs_size = sizeof(root_certs) / sizeof(root_certs[0]);

@@ -134,6 +133,23 @@ extern "C" int us_internal_raw_root_certs(struct us_cert_string_t **out) {
return root_certs_size;
}

struct us_default_ca_certificates {
X509 *root_cert_instances[root_certs_size];
STACK_OF(X509) *root_extra_cert_instances;
};

us_default_ca_certificates* us_get_default_ca_certificates() {
static us_default_ca_certificates default_ca_certificates = {{NULL}, NULL};

us_internal_init_root_certs(default_ca_certificates.root_cert_instances, default_ca_certificates.root_extra_cert_instances);

return &default_ca_certificates;
}

STACK_OF(X509) *us_get_root_extra_cert_instances() {
return us_get_default_ca_certificates()->root_extra_cert_instances;
}

extern "C" X509_STORE *us_get_default_ca_store() {
X509_STORE *store = X509_STORE_new();
if (store == NULL) {
@@ -145,10 +161,9 @@ extern "C" X509_STORE *us_get_default_ca_store() {
return NULL;
}

static X509 *root_cert_instances[root_certs_size] = {NULL};
static STACK_OF(X509) *root_extra_cert_instances = NULL;

us_internal_init_root_certs(root_cert_instances, root_extra_cert_instances);
us_default_ca_certificates *default_ca_certificates = us_get_default_ca_certificates();
X509** root_cert_instances = default_ca_certificates->root_cert_instances;
STACK_OF(X509) *root_extra_cert_instances = default_ca_certificates->root_extra_cert_instances;

// load all root_cert_instances on the default ca store
for (size_t i = 0; i < root_certs_size; i++) {
@@ -1,4 +1,4 @@
// Maintaining the root certificates
// Maintaining the root certificates
//
// `src/crypto/root_certs.h` contains a compiled-in set of root certificates used as trust anchors
// for TLS certificate validation.
@@ -23,7 +23,7 @@
// `src/crypto/root_certs.h`.
// * Using `git diff-files` to determine which certificates have been added and/or
// removed.
//
//
#include "libusockets.h"
static struct us_cert_string_t root_certs[] = {
13
packages/bun-usockets/src/crypto/root_certs_header.h
Normal file
@@ -0,0 +1,13 @@
#include <openssl/pem.h>
#include <openssl/x509.h>

#ifdef __cplusplus
#define CPPDECL extern "C"

STACK_OF(X509) *us_get_root_extra_cert_instances();

#else
#define CPPDECL extern
#endif

CPPDECL X509_STORE *us_get_default_ca_store();
@@ -19,7 +19,6 @@
#include "internal/internal.h"
#include <stdlib.h>
#include <time.h>

#if defined(LIBUS_USE_EPOLL) || defined(LIBUS_USE_KQUEUE)

void Bun__internal_dispatch_ready_poll(void* loop, void* poll);
@@ -110,7 +109,7 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
}

#if defined(LIBUS_USE_EPOLL)
#if defined(LIBUS_USE_EPOLL)

#include <sys/syscall.h>
#include <signal.h>
@@ -131,9 +130,9 @@ extern ssize_t sys_epoll_pwait2(int epfd, struct epoll_event* events, int maxeve

static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout) {
int ret;
sigset_t mask;
sigset_t mask;
sigemptyset(&mask);

if (has_epoll_pwait2 != 0) {
do {
ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, &mask);
@@ -146,7 +145,7 @@ static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents,
has_epoll_pwait2 = 0;
}

int timeoutMs = -1;
int timeoutMs = -1;
if (timeout) {
timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
}
@@ -178,7 +177,7 @@ struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t
if (has_epoll_pwait2 == -1) {
if (Bun__isEpollPwait2SupportedOnLinuxKernel() == 0) {
has_epoll_pwait2 = 0;
}
}
}

#else
@@ -338,7 +337,7 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop, struct

// if new events does not contain the ready events of this poll then remove (no we filter that out later on)
SET_READY_POLL(loop, i, new_poll);

num_entries_possibly_remaining--;
}
}
@@ -358,16 +357,16 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
if ((new_events & LIBUS_SOCKET_READABLE) != (old_events & LIBUS_SOCKET_READABLE)) {
EV_SET64(&change_list[change_length++], fd, EVFILT_READ, is_readable ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}

if(!is_readable && !is_writable) {
if(!(old_events & LIBUS_SOCKET_WRITABLE)) {
// if we are not reading or writing, we need to add writable to receive FIN
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, EV_ADD, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}
} else if ((new_events & LIBUS_SOCKET_WRITABLE) != (old_events & LIBUS_SOCKET_WRITABLE)) {
/* Do they differ in writable? */
/* Do they differ in writable? */
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}
}
int ret;
do {
ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
@@ -381,19 +380,18 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d

struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, unsigned int ext_size) {
int events = us_poll_events(p);

struct us_poll_t *new_p = us_realloc(p, sizeof(struct us_poll_t) + ext_size);
if (p != new_p && events) {
if (p != new_p) {
#ifdef LIBUS_USE_EPOLL
/* Hack: forcefully update poll by stripping away already set events */
new_p->state.poll_type = us_internal_poll_type(new_p);
us_poll_change(new_p, loop, events);
#else
/* Forcefully update poll by resetting them with new_p as user data */
kqueue_change(loop->fd, new_p->state.fd, 0, events, new_p);
#endif

/* This is needed for epoll also (us_change_poll doesn't update the old poll) */
kqueue_change(loop->fd, new_p->state.fd, 0, LIBUS_SOCKET_WRITABLE | LIBUS_SOCKET_READABLE, new_p);
#endif /* This is needed for epoll also (us_change_poll doesn't update the old poll) */
us_internal_loop_update_pending_ready_polls(loop, p, new_p, events, events);
}

@@ -447,7 +445,7 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
kqueue_change(loop->fd, p->state.fd, old_events, events, p);
#endif
/* Set all removed events to null-polls in pending ready poll list */
//us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events);
// us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events);
}
}
@@ -673,7 +671,7 @@ struct us_internal_async *us_internal_create_async(struct us_loop_t *loop, int f
// using it for notifications and not for any other purpose.
mach_port_limits_t limits = { .mpl_qlimit = 1 };
kr = mach_port_set_attributes(self, cb->port, MACH_PORT_LIMITS_INFO, (mach_port_info_t)&limits, MACH_PORT_LIMITS_INFO_COUNT);

if (UNLIKELY(kr != KERN_SUCCESS)) {
return NULL;
}
@@ -688,7 +686,7 @@ void us_internal_async_close(struct us_internal_async *a) {
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);

int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
@@ -720,7 +718,7 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
event.ext[1] = MACHPORT_BUF_LEN;
event.udata = (uint64_t)(void*)internal_cb;

int ret;
int ret;
do {
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
} while (IS_EINTR(ret));
@@ -750,12 +748,12 @@ void us_internal_async_wakeup(struct us_internal_async *a) {
0, // Fail instantly if the port is full
MACH_PORT_NULL
);

switch (kr) {
case KERN_SUCCESS: {
break;
}

// This means that the send would've blocked because the
// queue is full. We assume success because the port is full.
case MACH_SEND_TIMED_OUT: {

@@ -107,8 +107,8 @@ struct addrinfo_result {
#define us_internal_ssl_socket_context_r struct us_internal_ssl_socket_context_t *nonnull_arg
#define us_internal_ssl_socket_r struct us_internal_ssl_socket_t *nonnull_arg

extern int Bun__addrinfo_get(struct us_loop_t* loop, const char* host, struct addrinfo_request** ptr);
extern int Bun__addrinfo_set(struct addrinfo_request* ptr, struct us_connecting_socket_t* socket);
extern int Bun__addrinfo_get(struct us_loop_t* loop, const char* host, uint16_t port, struct addrinfo_request** ptr);
extern int Bun__addrinfo_set(struct addrinfo_request* ptr, struct us_connecting_socket_t* socket);
extern void Bun__addrinfo_freeRequest(struct addrinfo_request* addrinfo_req, int error);
extern struct addrinfo_result *Bun__addrinfo_getRequestResult(struct addrinfo_request* addrinfo_req);

@@ -158,7 +158,7 @@ void us_internal_socket_after_open(us_socket_r s, int error);
struct us_internal_ssl_socket_t *
us_internal_ssl_socket_close(us_internal_ssl_socket_r s, int code,
void *reason);

int us_internal_handle_dns_results(us_loop_r loop);

/* Sockets are polls */
@@ -167,9 +167,9 @@ struct us_socket_flags {
/* If true, the readable side is paused */
bool is_paused: 1;
/* Allow to stay alive after FIN/EOF */
bool allow_half_open: 1;
bool allow_half_open: 1;
/* 0 = not in low-prio queue, 1 = is in low-prio queue, 2 = was in low-prio queue in this iteration */
unsigned char low_prio_state: 2;
unsigned char low_prio_state: 2;
/* If true, the socket should be read using readmsg to support receiving file descriptors */
bool is_ipc: 1;

@@ -299,7 +299,7 @@ struct us_socket_context_t {
struct us_connecting_socket_t *(*on_connect_error)(struct us_connecting_socket_t *, int code);
struct us_socket_t *(*on_socket_connect_error)(struct us_socket_t *, int code);
int (*is_low_prio)(struct us_socket_t *);

};

/* Internal SSL interface */
@@ -310,7 +310,7 @@ struct us_internal_ssl_socket_t;
typedef void (*us_internal_on_handshake_t)(
struct us_internal_ssl_socket_t *, int success,
struct us_bun_verify_error_t verify_error, void *custom_data);

void us_internal_socket_context_free(int ssl, struct us_socket_context_t *context);
/* SNI functions */
void us_internal_ssl_socket_context_add_server_name(

@@ -128,7 +128,7 @@ void us_internal_timer_sweep(struct us_loop_t *loop) {
if (context->iterator == s && long_ticks == s->long_timeout) {
s->long_timeout = 255;
if (context->on_socket_long_timeout != NULL) context->on_socket_long_timeout(s);
}
}

/* Check for unlink / link (if the event handler did not modify the chain, we step 1) */
if (s == context->iterator) {
@@ -336,12 +336,13 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
case POLL_TYPE_SOCKET: {
/* We should only use s, no p after this point */
struct us_socket_t *s = (struct us_socket_t *) p;

/* The context can change after calling a callback but the loop is always the same */
struct us_loop_t* loop = s->context->loop;
if (events & LIBUS_SOCKET_WRITABLE && !error) {
/* Note: if we failed a write as a socket of one loop then adopted
* to another loop, this will be wrong. Absurd case though */
s->context->loop->data.last_write_failed = 0;

loop->data.last_write_failed = 0;

s = s->context->on_writable(s);

if (!s || us_socket_is_closed(0, s)) {
@@ -349,8 +350,8 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
}

/* If we have no failed write or if we shut down, then stop polling for more writable */
if (!s->context->loop->data.last_write_failed || us_socket_is_shut_down(0, s)) {
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_READABLE);
if (!loop->data.last_write_failed || us_socket_is_shut_down(0, s)) {
us_poll_change(&s->p, loop, us_poll_events(&s->p) & LIBUS_SOCKET_READABLE);
}
}

@@ -358,25 +359,28 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
/* Contexts may prioritize down sockets that are currently readable, e.g. when SSL handshake has to be done.
* SSL handshakes are CPU intensive, so we limit the number of handshakes per loop iteration, and move the rest
* to the low-priority queue */
if (s->context->is_low_prio(s)) {
if (s->flags.low_prio_state == 2) {
s->flags.low_prio_state = 0; /* Socket has been delayed and now it's time to process incoming data for one iteration */
} else if (s->context->loop->data.low_prio_budget > 0) {
s->context->loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */
struct us_socket_context_t *context = s->context;
struct us_socket_flags* flags = &s->flags;
if (context->is_low_prio(s)) {
if (flags->low_prio_state == 2) {
flags->low_prio_state = 0; /* Socket has been delayed and now it's time to process incoming data for one iteration */
} else if (loop->data.low_prio_budget > 0) {
loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */
} else {
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
us_socket_context_ref(0, s->context);
us_internal_socket_context_unlink_socket(0, s->context, s);
struct us_poll_t* poll = &s->p;
us_poll_change(poll, loop, us_poll_events(poll) & LIBUS_SOCKET_WRITABLE);
us_socket_context_ref(0, context);
us_internal_socket_context_unlink_socket(0, context, s);

/* Link this socket to the low-priority queue - we use a LIFO queue, to prioritize newer clients that are
* maybe not already timed out - sounds unfair, but works better in real-life with smaller client-timeouts
* under high load */
s->prev = 0;
s->next = s->context->loop->data.low_prio_head;
s->next = loop->data.low_prio_head;
if (s->next) s->next->prev = s;
s->context->loop->data.low_prio_head = s;
loop->data.low_prio_head = s;

s->flags.low_prio_state = 1;
flags->low_prio_state = 1;

break;
}
@@ -385,7 +389,6 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
size_t repeat_recv_count = 0;

do {
const struct us_loop_t* loop = s->context->loop;
#ifdef _WIN32
const int recv_flags = MSG_PUSH_IMMEDIATE;
#else
@@ -398,20 +401,20 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
struct msghdr msg = {0};
struct iovec iov = {0};
char cmsg_buf[CMSG_SPACE(sizeof(int))];

iov.iov_base = loop->data.recv_buf + LIBUS_RECV_BUFFER_PADDING;
iov.iov_len = LIBUS_RECV_BUFFER_LENGTH;

msg.msg_flags = 0;
msg.msg_flags = 0;
msg.msg_iov = &iov;
msg.msg_iovlen = 1;
msg.msg_name = NULL;
msg.msg_namelen = 0;
msg.msg_controllen = CMSG_LEN(sizeof(int));
msg.msg_control = cmsg_buf;

length = bsd_recvmsg(us_poll_fd(&s->p), &msg, recv_flags);

// Extract file descriptor if present
if (length > 0 && msg.msg_controllen > 0) {
struct cmsghdr *cmsg_ptr = CMSG_FIRSTHDR(&msg);
@@ -439,14 +442,14 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
// - the event loop isn't very busy, so we can read multiple times in a row
#define LOOP_ISNT_VERY_BUSY_THRESHOLD 25
if (
s && length >= (LIBUS_RECV_BUFFER_LENGTH - 24 * 1024) && length <= LIBUS_RECV_BUFFER_LENGTH &&
(error || loop->num_ready_polls < LOOP_ISNT_VERY_BUSY_THRESHOLD) &&
s && length >= (LIBUS_RECV_BUFFER_LENGTH - 24 * 1024) && length <= LIBUS_RECV_BUFFER_LENGTH &&
(error || loop->num_ready_polls < LOOP_ISNT_VERY_BUSY_THRESHOLD) &&
!us_socket_is_closed(0, s)
) {
repeat_recv_count += error == 0;

// When not hung up, read a maximum of 10 times to avoid starving other sockets
// We don't bother with ioctl(FIONREAD) because we've set MSG_DONTWAIT
// We don't bother with ioctl(FIONREAD) because we've set MSG_DONTWAIT
if (!(repeat_recv_count > 10 && loop->num_ready_polls > 2)) {
continue;
}
@@ -478,7 +481,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
}
if(s->flags.allow_half_open) {
/* We got an error but it is EOF and we allow half open, so stop polling for readable and keep going */
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
us_poll_change(&s->p, loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
s = s->context->on_end(s);
} else {
/* We don't allow half open, just emit end and close the socket */
@@ -486,7 +489,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
s = us_socket_close(0, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, NULL);
return;
}
}
}
/* Such as epollerr or EV_ERROR */
if (error && s) {
/* Todo: decide what code we give here */

@@ -387,29 +387,29 @@ int us_socket_ipc_write_fd(struct us_socket_t *s, const char* data, int length,
struct msghdr msg = {0};
struct iovec iov = {0};
char cmsgbuf[CMSG_SPACE(sizeof(int))];

iov.iov_base = (void*)data;
iov.iov_len = length;

msg.msg_iov = &iov;
msg.msg_iovlen = 1;
msg.msg_control = cmsgbuf;
msg.msg_controllen = CMSG_SPACE(sizeof(int));

struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
cmsg->cmsg_level = SOL_SOCKET;
cmsg->cmsg_type = SCM_RIGHTS;
cmsg->cmsg_len = CMSG_LEN(sizeof(int));

*(int *)CMSG_DATA(cmsg) = fd;

int sent = bsd_sendmsg(us_poll_fd(&s->p), &msg, 0);

if (sent != length) {
s->context->loop->data.last_write_failed = 1;
us_poll_change(&s->p, s->context->loop, LIBUS_SOCKET_READABLE | LIBUS_SOCKET_WRITABLE);
}

return sent < 0 ? 0 : sent;
}
#endif

@@ -249,7 +249,7 @@ public:
}

static TemplatedApp<SSL>* create(SocketContextOptions options = {}) {

auto* httpContext = HttpContext<SSL>::create(Loop::get(), options);
if (!httpContext) {
return nullptr;
@@ -646,4 +646,3 @@ typedef TemplatedApp<false> App;
typedef TemplatedApp<true> SSLApp;

}
@@ -224,7 +224,7 @@ public:

/**
 * Flushes the socket buffer by writing as much data as possible to the underlying socket.
 *
 * @return The total number of bytes successfully written to the socket
 */
size_t flush() {

@@ -237,30 +237,30 @@ public:
/* Get the associated asynchronous socket data structure */
AsyncSocketData<SSL> *asyncSocketData = getAsyncSocketData();
size_t total_written = 0;

/* Continue flushing as long as we have data in the buffer */
while (asyncSocketData->buffer.length()) {
    /* Get current buffer size */
    size_t buffer_len = asyncSocketData->buffer.length();

    /* Limit write size to INT_MAX as the underlying socket API uses int for length */
    int max_flush_len = std::min(buffer_len, (size_t)INT_MAX);

    /* Attempt to write data to the socket */
    int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len, 0);
    total_written += written;

    /* Check if we couldn't write the entire buffer */
    if ((unsigned int) written < buffer_len) {
        /* Remove the successfully written data from the buffer */
        asyncSocketData->buffer.erase((unsigned int) written);

        /* If we wrote less than we attempted, the socket buffer is likely full;
         * [[likely]] is used as an optimization hint to the compiler,
         * since written < buffer_len is very likely to be true */
        if (written < max_flush_len) [[likely]] {
            /* Cannot write more at this time, return what we've written so far */
            return total_written;
        }

@@ -317,7 +317,7 @@ public:
    asyncSocketData->buffer.clear();
}

if (length) {
    if (loopData->isCorkedWith(this)) {
        /* We are corked */
        if (LoopData::CORK_BUFFER_SIZE - loopData->getCorkOffset() >= (unsigned int) length) {
@@ -52,7 +52,7 @@ public:
if (key.length() < 2) {
    return true;
}

ScrambleArea s = getFeatures(key);
s.val = perfectHash(s.val);
return filter[s.p[0]] &&
@@ -76,7 +76,7 @@ namespace uWS {
    data.remove_prefix(1);
}
}

auto len = data.length();
if (len) {
    // consume extension

@@ -93,20 +93,20 @@ namespace uWS {
/* RFC 9110: Token format (TLDR: anything below 32 is not allowed)
 * TODO: add support for quoted-strings values (RFC 9110: 3.2.6. Quoted-String)
 * Example of chunked encoding with extensions:
 *
 * 4;key=value\r\n
 * Wiki\r\n
 * 5;foo=bar;baz=quux\r\n
 * pedia\r\n
 * 0\r\n
 * \r\n
 *
 * The chunk size is in hex (4, 5, 0), followed by optional
 * semicolon-separated extensions. Extensions consist of a key
 * (token) and optional value. The value may be a token or a
 * quoted string. The chunk data follows the CRLF after the
 * extensions and must be exactly the size specified.
 *
 * RFC 7230 Section 4.1.1 defines chunk extensions as:
 * chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
 * chunk-ext-name = token

@@ -116,7 +116,7 @@ namespace uWS {
    state = STATE_IS_ERROR;
    return;
}

data.remove_prefix(1);
}
}
@@ -17,5 +17,5 @@ namespace uWS {
    //printf("Constructing http3contextdata: %p\n", this);
}
};

}
@@ -77,7 +77,7 @@ namespace uWS {

/* If not already written */
writeStatus("200 OK");

// whether there is a body is determined by the ending, so this is perfect here
us_quic_socket_context_send_headers(nullptr, (us_quic_stream_t *) this, responseData->headerOffset, data.length() > 0);

@@ -14,7 +14,7 @@ namespace uWS {

/* Status is always first header, just like for h1 */
unsigned int headerOffset = 0;

/* Write offset */
uint64_t offset = 0;
@@ -90,7 +90,7 @@ private:
MACRO("UNLINK") \
MACRO("UNLOCK") \
MACRO("UNSUBSCRIBE")

#ifndef _WIN32
static constexpr std::array<const std::string, 35> HTTP_METHODS = {

@@ -108,12 +108,12 @@ private:
FOR_EACH_HTTP_METHOD(MACRO)
#undef MACRO
};

static std::span<const std::string> getAllHttpMethods() {
    static std::once_flag flag;
    static std::array<std::string, 35> methods;
    std::call_once(flag, []() {
        methods = {
#define MACRO(name) std::string {name},
            FOR_EACH_HTTP_METHOD(MACRO)
#undef MACRO
@@ -201,7 +201,7 @@ private:
/* Call filter */
HttpContextData<SSL> *httpContextData = getSocketContextDataS(s);

for (auto &f : httpContextData->filterHandlers) {
    f((HttpResponse<SSL> *) s, -1);
}

@@ -276,7 +276,7 @@ private:

/* Mark pending request and emit it */
httpResponseData->state = HttpResponseData<SSL>::HTTP_RESPONSE_PENDING;

/* Mark this response as connectionClose if ancient or connection: close */
if (httpRequest->isAncient() || httpRequest->getHeader("connection").length() == 5) {

@@ -336,7 +336,7 @@ private:
}, [httpResponseData](void *user, std::string_view data, bool fin) -> void * {
    /* We always get an empty chunk even if there is no data */
    if (httpResponseData->inStream) {

        /* Todo: can this handle timeout for non-post as well? */
        if (fin) {
            /* If we just got the last chunk (or empty chunk), disable timeout */
@@ -374,7 +374,7 @@ private:
});

auto httpErrorStatusCode = result.httpErrorStatusCode();

/* Mark that we are no longer parsing Http */
httpContextData->flags.isParsingHttp = false;
/* If we got fullptr that means the parser wants us to close the socket from error (same as calling the errorHandler) */

@@ -388,7 +388,7 @@ private:
/* Close any socket on HTTP errors */
us_socket_close(SSL, s, 0, nullptr);
}

auto returnedData = result.returnedData;
/* We need to uncork in all cases, except for nullptr (closed socket, or upgraded socket) */
if (returnedData != nullptr) {
@@ -456,10 +456,9 @@ private:
size_t bufferedAmount = asyncSocket->getBufferedAmount();
if (bufferedAmount > 0) {
    /* Try to flush pending data from the socket's buffer to the network */
-   bufferedAmount -= asyncSocket->flush();
+   asyncSocket->flush();
    /* Check if there's still data waiting to be sent after flush attempt */
-   if (bufferedAmount > 0) {
+   if (asyncSocket->getBufferedAmount() > 0) {
        /* Socket buffer is not completely empty yet
         * - Reset the timeout to prevent premature connection closure
         * - This allows time for another writable event or new request

@@ -472,12 +471,12 @@ private:
         * and will fall through to the next section of code
         */
    }

    /* Ask the developer to write data and return success (true) or failure (false), OR skip sending anything and return success (true). */
    if (httpResponseData->onWritable) {
        /* We are now writable, so hang the timeout again; the user does not have to do anything, so we should hang until end or tryEnd rearms the timeout */
        us_socket_timeout(SSL, s, 0);

        /* We expect the developer to return whether or not write was successful (true).
         * If write was never called, the developer should still return true so that we may drain. */
        bool success = httpResponseData->callOnWritable(reinterpret_cast<HttpResponse<SSL> *>(asyncSocket), httpResponseData->offset);
@@ -498,6 +497,7 @@ private:
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) {
    if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) == 0) {
        if (asyncSocket->getBufferedAmount() == 0) {
            asyncSocket->shutdown();
            /* We need to force close after sending FIN, since we want to hinder
             * clients from continuing to send their huge data */
@@ -588,7 +588,7 @@ public:
    methods = getAllHttpMethods();
} else {
    methods_buffer[0] = std::string(method);
    methods = {methods_buffer.data(), 1};
}

uint32_t priority = method == "*" ? httpContextData->currentRouter->LOW_PRIORITY : (upgrade ? httpContextData->currentRouter->HIGH_PRIORITY : httpContextData->currentRouter->MEDIUM_PRIORITY);

@@ -616,7 +616,7 @@ public:
}
}

httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets), httpContextData](auto *r) mutable {
    auto user = r->getUserData();

@@ -667,5 +667,3 @@ public:
};

}
@@ -39,4 +39,3 @@ static const std::string_view httpErrorResponses[] = {

}
@@ -108,7 +108,7 @@ namespace uWS
}

/* Returns true if there was an error */
bool isError() {
    return parserError != HTTP_PARSER_ERROR_NONE;
}

@@ -403,7 +403,7 @@ namespace uWS

static bool isValidMethod(std::string_view str, bool useStrictMethodValidation) {
    if (str.empty()) return false;

    if (useStrictMethodValidation) {
        return Bun__HTTPMethod__from(str.data(), str.length()) != -1;
    }
@@ -613,22 +613,25 @@ namespace uWS
    return HttpParserResult::shortRead();
}
postPaddedBuffer = requestLineResult.position;

if (requestLineResult.isAncientHTTP) {
    isAncientHTTP = true;
}
/* No request headers found */
size_t buffer_size = end - postPaddedBuffer;
const char * headerStart = (headers[0].key.length() > 0) ? headers[0].key.data() : end;
-if (buffer_size < 2) {
-    /* Fragmented request */
-    return HttpParserResult::error(HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_REQUEST);
+/* Check if we can see if headers follow or not */
+if (postPaddedBuffer + 2 > end) {
+    /* Not enough data to check for \r\n */
+    return HttpParserResult::shortRead();
}
-if (buffer_size >= 2 && postPaddedBuffer[0] == '\r' && postPaddedBuffer[1] == '\n') {
-    /* No headers found */
+/* Check for empty headers (no headers, just \r\n) */
+if (postPaddedBuffer[0] == '\r' && postPaddedBuffer[1] == '\n') {
    /* Valid request with no headers */
    return HttpParserResult::success((unsigned int) ((postPaddedBuffer + 2) - start));
}

headers++;

for (unsigned int i = 1; i < UWS_HTTP_MAX_HEADERS_COUNT - 1; i++) {
@@ -708,7 +711,7 @@ namespace uWS
}
}
} else {

    if (postPaddedBuffer[0] == '\r') {
        // invalid char after \r
        return HttpParserResult::error(HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_REQUEST);

@@ -754,7 +757,7 @@ namespace uWS

/* Add all headers to bloom filter */
req->bf.reset();

for (HttpRequest::Header *h = req->headers; (++h)->key.length(); ) {
    req->bf.add(h->key);
}

@@ -861,7 +864,7 @@ namespace uWS
        break;
    }
}

return HttpParserResult::success(consumedTotal, user);
}

@@ -997,4 +1000,3 @@ public:
};

}
@@ -106,13 +106,13 @@ public:
if (closeConnection) {
    /* We can only write the header once */
    if (!(httpResponseData->state & (HttpResponseData<SSL>::HTTP_END_CALLED))) {

        /* HTTP 1.1 must send this back unless the client already sent it to us.
         * It is a connection close when either of the two parties says so, but the
         * one party must tell the other one so.
         *
         * This check also serves to limit writing the header only once. */
-       if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) == 0) {
+       if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) == 0 && !(httpResponseData->state & (HttpResponseData<SSL>::HTTP_WRITE_CALLED))) {
            writeHeader("Connection", "close");
        }
@@ -125,14 +125,13 @@ public:

/* We do not have tryWrite-like functionality, so ignore optional in this path */

/* Write the chunked data if there is any (this will not send zero chunks) */
this->write(data, nullptr);

/* Terminating 0 chunk */
Super::write("0\r\n\r\n", 5);

httpResponseData->markDone();

/* We need to check if we should close this socket here now */
@@ -471,7 +470,7 @@ public:
writeStatus(HTTP_200_OK);

HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

-if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER) && !httpResponseData->fromAncientRequest) {
+if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
    /* Write mark on first call to write */
@@ -533,7 +532,7 @@ public:
}
return !has_failed;
}

HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

@@ -546,7 +545,7 @@ public:
    Super::write("\r\n", 2);
    httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
}

writeUnsignedHex((unsigned int) data.length());
Super::write("\r\n", 2);
} else if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
@@ -579,14 +578,13 @@ public:
    // Write End of Chunked Encoding after data has been written
    Super::write("\r\n", 2);
}

/* Reset timeout on each sent chunk */
this->resetTimeout();

if (writtenPtr) {
    *writtenPtr = total_written;
}

/* If we did not fail the write, accept more */
return !has_failed;
}

@@ -109,5 +109,3 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
};

}
@@ -87,7 +87,7 @@ private:
LoopData *loopData = (LoopData *) us_loop_ext((struct us_loop_t *) loop);
loopData->dateTimer = us_create_timer((struct us_loop_t *) loop, 1, sizeof(LoopData *));
loopData->updateDate();

memcpy(us_timer_ext(loopData->dateTimer), &loopData, sizeof(LoopData *));
us_timer_set(loopData->dateTimer, [](struct us_timer_t *t) {
    LoopData *loopData;

@@ -103,7 +103,7 @@ private:
~LoopCleaner() {
    // There's no need to call this destructor if Bun is in the process of exiting.
    // This is both a performance thing, and also to prevent freeing some things which are not meant to be freed,
    // such as uv_tty_t
    if (loop && cleanMe && !bun_is_exiting()) {
        cleanMe = false;
        loop->free();
@@ -97,11 +97,11 @@ public:
    this->corkedSocket = nullptr;
    this->corkOffset = 0;
}

unsigned int getCorkOffset() {
    return this->corkOffset;
}

void setCorkOffset(unsigned int offset) {
    this->corkOffset = offset;
}

@@ -109,7 +109,7 @@ public:
void incrementCorkedOffset(unsigned int offset) {
    this->corkOffset += offset;
}

char* getCorkBuffer() {
    return this->corkBuffer;
}

@@ -118,7 +118,6 @@ public:
time_t now = time(0);
struct tm tstruct = {};
#ifdef _WIN32
/* Micro, fucking soft never follows spec. */
gmtime_s(&tstruct, &now);
#else
gmtime_r(&now, &tstruct);
@@ -260,7 +260,7 @@ public:
/* This one always resets needsDrainage before it calls any cb's.
 * Otherwise we would stack overflow when sending after publish but before drain. */
drainImpl(s);

/* If we drained the last subscriber, also clear outgoingMessages */
if (!drainableSubscribers) {
    outgoingMessages.clear();

@@ -363,5 +363,3 @@ public:
};

}
scripts/buildkite-failures.ts (Executable file, 731 lines)
@@ -0,0 +1,731 @@
#!/usr/bin/env bun

import { $ } from "bun";
import { existsSync } from "fs";
import { resolve } from "path";

// Check if we're in a TTY for color support
const isTTY = process.stdout.isTTY || process.env.FORCE_COLOR === '1';

// Get git root directory
let gitRoot = process.cwd();
try {
  gitRoot = (await $`git rev-parse --show-toplevel`.quiet().text()).trim();
} catch {
  // Fall back to current directory if not in a git repo
}

// Helper to convert file path to file:// URL if it exists
function fileToUrl(filePath) {
  try {
    // Extract just the file path without line numbers or other info
    const match = filePath.match(/^([^\s:]+\.(ts|js|tsx|jsx|zig))/);
    if (!match) return filePath;

    const cleanPath = match[1];
    const fullPath = resolve(gitRoot, cleanPath);

    if (existsSync(fullPath)) {
      return `file://${fullPath}`;
    }
  } catch (error) {
    // If anything fails, just return the original path
  }

  return filePath;
}
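// Example (illustrative): fileToUrl("test/js/bun/foo.test.ts - 2 failing")
// would return "file:///<gitRoot>/test/js/bun/foo.test.ts" when that file
// exists, and the input string unchanged otherwise.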

// Color codes - simpler color scheme
const colors = {
  reset: isTTY ? '\x1b[0m' : '',
  bold: isTTY ? '\x1b[1m' : '',
  dim: isTTY ? '\x1b[2m' : '',
  red: isTTY ? '\x1b[31m' : '',
  green: isTTY ? '\x1b[32m' : '',
  bgBlue: isTTY ? '\x1b[44m' : '',
  bgRed: isTTY ? '\x1b[41m' : '',
  white: isTTY ? '\x1b[97m' : '',
};

// Parse command line arguments
const args = process.argv.slice(2);
const showWarnings = args.includes('--warnings') || args.includes('-w');
const showFlaky = args.includes('--flaky') || args.includes('-f');
const inputArg = args.find(arg => !arg.startsWith("-"));

// Determine what type of input we have
let buildNumber = null;
let branch = null;

if (inputArg) {
  // BuildKite URL
  if (inputArg.includes('buildkite.com')) {
    const buildMatch = inputArg.match(/builds\/(\d+)/);
    if (buildMatch) {
      buildNumber = buildMatch[1];
    }
  }
  // GitHub PR URL
  else if (inputArg.includes('github.com') && inputArg.includes('/pull/')) {
    const prMatch = inputArg.match(/pull\/(\d+)/);
    if (prMatch) {
      // Fetch PR info from GitHub API
      const prNumber = prMatch[1];
      const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`);
      if (prResponse.ok) {
        const pr = await prResponse.json();
        branch = pr.head.ref;
      }
    }
  }
  // Plain number or #number - assume it's a GitHub PR
  else if (/^#?\d+$/.test(inputArg)) {
    const prNumber = inputArg.replace('#', '');
    const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`);
    if (prResponse.ok) {
      const pr = await prResponse.json();
      branch = pr.head.ref;
    } else {
      // If not a valid PR, maybe it's a BuildKite build number
      buildNumber = prNumber;
    }
  }
  // Otherwise assume it's a branch name
  else {
    branch = inputArg;
  }
} else {
  // No input, use current branch
  branch = (await $`git rev-parse --abbrev-ref HEAD`.text()).trim();
}

// If branch specified, find latest build
if (!buildNumber) {
  const buildsUrl = `https://buildkite.com/bun/bun/builds?branch=${encodeURIComponent(branch)}`;
  const response = await fetch(buildsUrl);
  const html = await response.text();
  const match = html.match(/\/bun\/bun\/builds\/(\d+)/);

  if (!match) {
    console.log(`No builds found for branch: ${branch}`);
    process.exit(0);
  }

  buildNumber = match[1];
}
// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
const build = await buildResponse.json();

// Calculate time ago
const buildTime = new Date(build.started_at);
const now = new Date();
const diffMs = now.getTime() - buildTime.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);

let timeAgo;
if (diffDays > 0) {
  timeAgo = `${diffDays} day${diffDays !== 1 ? 's' : ''} ago`;
} else if (diffHours > 0) {
  timeAgo = `${diffHours} hour${diffHours !== 1 ? 's' : ''} ago`;
} else if (diffMins > 0) {
  timeAgo = `${diffMins} minute${diffMins !== 1 ? 's' : ''} ago`;
} else {
  timeAgo = `${diffSecs} second${diffSecs !== 1 ? 's' : ''} ago`;
}

console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);

// Check if build passed
if (build.state === "passed") {
  console.log(`${colors.green}✅ Passed!${colors.reset}`);
  process.exit(0);
}

// Get failed jobs
const failedJobs = build.jobs?.filter(job =>
  job.exit_status && job.exit_status > 0 &&
  !job.soft_failed &&
  job.type === "script"
) || [];

// Platform emoji mapping
const platformMap = {
  'darwin': '🍎',
  'macos': '🍎',
  'ubuntu': '🐧',
  'debian': '🐧',
  'alpine': '🐧',
  'linux': '🐧',
  'windows': '🪟',
  'win': '🪟',
};

// Fetch annotations by scraping the build page
const pageResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}`);
const pageHtml = await pageResponse.text();

// Extract script tags using HTMLRewriter
let annotationsData = null;
const scriptContents: string[] = [];

const scriptRewriter = new HTMLRewriter()
  .on('script', {
    text(text) {
      scriptContents.push(text.text);
    }
  });

await new Response(scriptRewriter.transform(new Response(pageHtml))).text();

// Find the registerRequest call in script contents
const fullScript = scriptContents.join('');
let registerRequestIndex = fullScript.indexOf('registerRequest');

// Find the AnnotationsListRendererQuery after registerRequest
if (registerRequestIndex !== -1) {
  const afterRegisterRequest = fullScript.substring(registerRequestIndex);
  const annotationsIndex = afterRegisterRequest.indexOf('"AnnotationsListRendererQuery"');
  if (annotationsIndex === -1 || annotationsIndex > 100) {
    // Not the right registerRequest call
    registerRequestIndex = -1;
  }
}
if (registerRequestIndex !== -1) {
  try {
    // Find the start of the JSON object (after the comma and any whitespace)
    let jsonStart = registerRequestIndex;

    // Skip to the opening brace, accounting for the function name and first parameter
    let commaFound = false;
    for (let i = registerRequestIndex; i < fullScript.length; i++) {
      if (fullScript[i] === ',' && !commaFound) {
        commaFound = true;
      } else if (commaFound && fullScript[i] === '{') {
        jsonStart = i;
        break;
      }
    }

    // Find the matching closing brace, considering strings
    let braceCount = 0;
    let jsonEnd = jsonStart;
    let inString = false;
    let escapeNext = false;

    for (let i = jsonStart; i < fullScript.length; i++) {
      const char = fullScript[i];

      if (escapeNext) {
        escapeNext = false;
        continue;
      }

      if (char === '\\') {
        escapeNext = true;
        continue;
      }

      if (char === '"' && !inString) {
        inString = true;
      } else if (char === '"' && inString) {
        inString = false;
      }

      if (!inString) {
        if (char === '{') braceCount++;
        else if (char === '}') {
          braceCount--;
          if (braceCount === 0) {
            jsonEnd = i + 1;
            break;
          }
        }
      }
    }

    const jsonString = fullScript.substring(jsonStart, jsonEnd);
    annotationsData = JSON.parse(jsonString);
    const edges = annotationsData?.build?.annotations?.edges || [];

    // Just collect all unique annotations by context
    const annotationsByContext = new Map();

    for (const edge of edges) {
      const node = edge.node;
      if (!node || !node.context) continue;

      // Skip if we already have this context
      if (annotationsByContext.has(node.context)) {
        continue;
      }

      annotationsByContext.set(node.context, {
        context: node.context,
        html: node.body?.html || ''
      });
    }

    // Collect annotations
    const annotations = Array.from(annotationsByContext.values());

    // Group annotations by test file to detect duplicates
    const annotationsByFile = new Map();
    const nonFileAnnotations = [];

    for (const annotation of annotations) {
      // Check if this is a file-based annotation
      const isFileAnnotation = annotation.context.match(/\.(ts|js|tsx|jsx|zig)$/);

      if (isFileAnnotation) {
        // Parse the HTML to extract all platform sections
        const html = annotation.html || '';

        // Check if this annotation contains multiple <details> sections (one per platform)
        const detailsSections = html.match(/<details>[\s\S]*?<\/details>/g);

        if (detailsSections && detailsSections.length > 1) {
          // Multiple platform failures in one annotation
          for (const section of detailsSections) {
            const summaryMatch = section.match(/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/);

            if (summaryMatch) {
              const filePath = summaryMatch[1];
              const failureInfo = summaryMatch[2];
              const platformHtml = summaryMatch[3];
              const platform = platformHtml.replace(/<img[^>]+>/g, '').trim();

              const fileKey = `${filePath}|${failureInfo}`;
              if (!annotationsByFile.has(fileKey)) {
                annotationsByFile.set(fileKey, {
                  filePath,
                  failureInfo,
                  platforms: [],
                  htmlParts: [],
                  originalAnnotations: []
                });
              }

              const entry = annotationsByFile.get(fileKey);
              entry.platforms.push(platform);
              entry.htmlParts.push(section);
              entry.originalAnnotations.push({
                ...annotation,
                html: section,
                originalHtml: html
              });
            }
          }
        } else {
          // Single platform failure
          const summaryMatch = html.match(/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/);

          if (summaryMatch) {
            const filePath = summaryMatch[1];
            const failureInfo = summaryMatch[2];
            const platformHtml = summaryMatch[3];
            const platform = platformHtml.replace(/<img[^>]+>/g, '').trim();

            const fileKey = `${filePath}|${failureInfo}`;
            if (!annotationsByFile.has(fileKey)) {
              annotationsByFile.set(fileKey, {
                filePath,
                failureInfo,
                platforms: [],
                htmlParts: [],
                originalAnnotations: []
              });
            }

            const entry = annotationsByFile.get(fileKey);
            entry.platforms.push(platform);
            entry.htmlParts.push(html);
            entry.originalAnnotations.push(annotation);
          } else {
            // Couldn't parse, treat as non-file annotation
            nonFileAnnotations.push(annotation);
          }
        }
      } else {
        // Non-file annotations (like "zig error")
        nonFileAnnotations.push(annotation);
      }
    }
    // Create merged annotations
    const mergedAnnotations = [];

    // Add file-based annotations
    for (const [key, entry] of annotationsByFile) {
      const { filePath, failureInfo, platforms, htmlParts, originalAnnotations } = entry;

      // If we have multiple platforms with the same content, merge them
      if (platforms.length > 1) {
        // Create context string with all platforms
        const uniquePlatforms = [...new Set(platforms)];
        const context = `${filePath} - ${failureInfo} on ${uniquePlatforms.join(', ')}`;

        // Check if all HTML parts are identical
        const firstHtml = htmlParts[0];
        const allSame = htmlParts.every(html => html === firstHtml);

        let mergedHtml = '';
        if (allSame) {
          // If all the same, just use the first one
          mergedHtml = firstHtml;
        } else {
          // If different, try to find one with the most color spans
          let bestHtml = firstHtml;
          let maxColorCount = (firstHtml.match(/term-fg/g) || []).length;

          for (const html of htmlParts) {
            const colorCount = (html.match(/term-fg/g) || []).length;
            if (colorCount > maxColorCount) {
              maxColorCount = colorCount;
              bestHtml = html;
            }
          }
          mergedHtml = bestHtml;
        }

        mergedAnnotations.push({
          context,
          html: mergedHtml,
          merged: true,
          platformCount: uniquePlatforms.length
        });
      } else {
        // Single platform, use original
        mergedAnnotations.push(originalAnnotations[0]);
      }
    }

    // Add non-file annotations
    mergedAnnotations.push(...nonFileAnnotations);

    // Sort annotations: ones with colors at the bottom
    const annotationsWithColorInfo = mergedAnnotations.map(annotation => {
      const html = annotation.html || '';
      const hasColors = html.includes('term-fg') || html.includes('\\x1b[');
      return { annotation, hasColors };
    });

    // Sort: no colors first, then colors
    annotationsWithColorInfo.sort((a, b) => {
      if (a.hasColors === b.hasColors) return 0;
      return a.hasColors ? 1 : -1;
    });

    const sortedAnnotations = annotationsWithColorInfo.map(item => item.annotation);

    // Count failures - look for actual test counts in the content
    let totalFailures = 0;
    let totalFlaky = 0;

    // First try to count from annotations
    for (const annotation of sortedAnnotations) {
      const isFlaky = annotation.context.toLowerCase().includes('flaky');
      const html = annotation.html || '';

      // Look for patterns like "X tests failed" or "X failing"
      const failureMatches = html.match(/(\d+)\s+(tests?\s+failed|failing)/gi);
      if (failureMatches) {
        for (const match of failureMatches) {
          const count = parseInt(match.match(/\d+/)[0]);
          if (isFlaky) {
            totalFlaky += count;
          } else {
            totalFailures += count;
          }
          break; // Only count first match to avoid duplicates
        }
      } else if (!isFlaky) {
        // If no count found, count the annotation itself
        totalFailures++;
      }
    }

    // If no annotations, use job count
    if (totalFailures === 0 && failedJobs.length > 0) {
      totalFailures = failedJobs.length;
    }

    // Display failure count
    if (totalFailures > 0 || totalFlaky > 0) {
      if (totalFailures > 0) {
        console.log(`\n${colors.red}${colors.bold}${totalFailures} test failures${colors.reset}`);
      }
      if (showFlaky && totalFlaky > 0) {
        console.log(`${colors.dim}${totalFlaky} flaky tests${colors.reset}`);
      }
      console.log();
    } else if (failedJobs.length > 0) {
      console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
    }
    // Display all annotations
    console.log();
    for (const annotation of sortedAnnotations) {
      // Skip flaky tests unless --flaky flag is set
      if (!showFlaky && annotation.context.toLowerCase().includes('flaky')) {
        continue;
      }

      // Display context header with background color
      // For merged annotations, show platform info
      if (annotation.merged && annotation.platformCount) {
        // Extract filename and failure info from context
        const contextParts = annotation.context.match(/^(.+?)\s+-\s+(.+?)\s+on\s+(.+)$/);
        if (contextParts) {
          const [, filename, failureInfo, platformsStr] = contextParts;
          const fileUrl = fileToUrl(filename);
          console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platformsStr}${colors.reset}`);
        } else {
          const fileUrl = fileToUrl(annotation.context);
          console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`);
        }
      } else {
        // Single annotation - need to extract platform info from HTML
        const fileUrl = fileToUrl(annotation.context);

        // Try to extract platform info from the HTML for single platform tests
        const html = annotation.html || '';
        const singlePlatformMatch = html.match(/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/);

        if (singlePlatformMatch) {
          const failureInfo = singlePlatformMatch[2];
          const platformHtml = singlePlatformMatch[3];
          const platform = platformHtml.replace(/<img[^>]+>/g, '').trim();
          console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platform}${colors.reset}`);
        } else {
          console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`);
        }
      }
      console.log();

      // Process the annotation HTML to preserve colors
      const html = annotation.html || '';

      // First unescape unicode sequences
      let unescapedHtml = html
        .replace(/\\u003c/g, '<')
        .replace(/\\u003e/g, '>')
        .replace(/\\u0026/g, '&')
        .replace(/\\"/g, '"')
        .replace(/\\'/g, "'")
        .replace(/\\u001b/g, '\x1b'); // Unescape ANSI escape sequences

      // Handle newlines more carefully - BuildKite sometimes has actual newlines that shouldn't be there
      // Only replace \n if it's actually an escaped newline, not part of the content
      unescapedHtml = unescapedHtml.replace(/\\n/g, '\n');

      // Also handle escaped ANSI sequences that might appear as \\x1b or \033
      unescapedHtml = unescapedHtml
        .replace(/\\\\x1b/g, '\x1b')
        .replace(/\\033/g, '\x1b');

      // Convert HTML with ANSI color classes to actual ANSI codes
      const termColors = {
        // Standard colors (0-7)
        'term-fg0': '\x1b[30m', // black
        'term-fg1': '\x1b[31m', // red
        'term-fg2': '\x1b[32m', // green
        'term-fg3': '\x1b[33m', // yellow
        'term-fg4': '\x1b[34m', // blue
        'term-fg5': '\x1b[35m', // magenta
        'term-fg6': '\x1b[36m', // cyan
        'term-fg7': '\x1b[37m', // white
        // Also support 30-37 format
        'term-fg30': '\x1b[30m', // black
        'term-fg31': '\x1b[31m', // red
        'term-fg32': '\x1b[32m', // green
        'term-fg33': '\x1b[33m', // yellow
        'term-fg34': '\x1b[34m', // blue
        'term-fg35': '\x1b[35m', // magenta
        'term-fg36': '\x1b[36m', // cyan
        'term-fg37': '\x1b[37m', // white
        // Bright colors with 'i' prefix
        'term-fgi90': '\x1b[90m', // bright black
        'term-fgi91': '\x1b[91m', // bright red
        'term-fgi92': '\x1b[92m', // bright green
        'term-fgi93': '\x1b[93m', // bright yellow
        'term-fgi94': '\x1b[94m', // bright blue
        'term-fgi95': '\x1b[95m', // bright magenta
        'term-fgi96': '\x1b[96m', // bright cyan
        'term-fgi97': '\x1b[97m', // bright white
        // Also support without 'i'
        'term-fg90': '\x1b[90m', // bright black
        'term-fg91': '\x1b[91m', // bright red
        'term-fg92': '\x1b[92m', // bright green
        'term-fg93': '\x1b[93m', // bright yellow
        'term-fg94': '\x1b[94m', // bright blue
        'term-fg95': '\x1b[95m', // bright magenta
        'term-fg96': '\x1b[96m', // bright cyan
        'term-fg97': '\x1b[97m', // bright white
        // Background colors
        'term-bg40': '\x1b[40m', // black
        'term-bg41': '\x1b[41m', // red
        'term-bg42': '\x1b[42m', // green
        'term-bg43': '\x1b[43m', // yellow
        'term-bg44': '\x1b[44m', // blue
        'term-bg45': '\x1b[45m', // magenta
        'term-bg46': '\x1b[46m', // cyan
        'term-bg47': '\x1b[47m', // white
        // Text styles
        'term-bold': '\x1b[1m',
        'term-dim': '\x1b[2m',
        'term-italic': '\x1b[3m',
        'term-underline': '\x1b[4m',
      };
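      // Example (illustrative): on a TTY, '<span class="term-fg31">FAIL</span>'
      // is rewritten by the loop below to '\x1b[31mFAIL\x1b[0m'.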

      let text = unescapedHtml;

      // Convert color spans to ANSI codes if TTY
      if (isTTY) {
        // Convert spans with color classes to ANSI codes
        for (const [className, ansiCode] of Object.entries(termColors)) {
          // Match spans that contain the class name (might have multiple classes)
          // Need to handle both formats: <span class="..."> and <span ... class="...">
          const regex = new RegExp(`<span[^>]*class="[^"]*\\b${className}\\b[^"]*"[^>]*>([\\s\\S]*?)</span>`, 'g');
          text = text.replace(regex, (match, content) => {
            // Don't add reset if the content already has ANSI codes
            if (content.includes('\x1b[')) {
              return `${ansiCode}${content}`;
            }
            return `${ansiCode}${content}${colors.reset}`;
          });
        }
      }

      // Check if we already have ANSI codes in the text after processing
      const hasExistingAnsi = text.includes('\x1b[');

      // Check for broken color patterns (single characters wrapped in colors)
      // If we see patterns like green[, red text, green], it's likely broken
      // Also check for patterns like: green[, then reset, then text, then red text, then reset, then green]
      const hasBrokenColors = text.includes('\x1b[32m[') || text.includes('\x1b[32m]') ||
        (text.includes('\x1b[32m✓') && text.includes('\x1b[31m') && text.includes('ms]'));

      if (hasBrokenColors) {
        // Remove all ANSI codes if the coloring looks broken
        text = text.replace(/\x1b\[[0-9;]*m/g, '');
      }

      // Remove all HTML tags and decode HTML entities, but be careful with existing ANSI codes
      text = text
        .replace(/<pre[^>]*><code[^>]*>([\s\S]*?)<\/code><\/pre>/g, '$1')
        .replace(/<br\s*\/?>/g, '\n')
        .replace(/<\/p>/g, '\n')
        .replace(/<p>/g, '')
        .replace(/<[^>]+>/g, '')
        .replace(/&lt;/g, '<')
        .replace(/&gt;/g, '>')
        .replace(/&amp;/g, '&')
        .replace(/&quot;/g, '"')
        .replace(/&#39;/g, "'")
        .replace(/&nbsp;/g, ' ')
        .replace(/\u00A0/g, ' ') // Non-breaking space
        .trim();
      // Remove excessive blank lines - be more aggressive
      text = text.replace(/\n\s*\n\s*\n+/g, '\n\n'); // Replace 3+ newlines with 2
      text = text.replace(/\n\s*\n/g, '\n'); // Replace 2 newlines with 1

      // For zig error annotations, check if there are multiple platform sections
      let handled = false;
      if (annotation.context.includes('zig error')) {
        // Split by platform headers within the content
        const platformSections = text.split(/(?=^\s*[^\s\/]+\.zig\s*-\s*zig error\s+on\s+)/m);

        if (platformSections.length > 1) {
          // Skip the first empty section if it exists
          const sections = platformSections.filter(s => s.trim());

          if (sections.length > 1) {
            // We have multiple platform errors in one annotation
            // Extract unique platform names
            const platforms = [];
            for (const section of sections) {
              const platformMatch = section.match(/on\s+(\S+)/);
              if (platformMatch) {
                platforms.push(platformMatch[1]);
              }
            }

            // Show combined header with background color
            const filename = annotation.context;
            const fileUrl = fileToUrl(filename);
            const platformText = platforms.join(', ');
            console.log(`${colors.bgRed}${colors.white}${colors.bold} ${fileUrl} ${colors.reset} ${colors.dim}on ${platformText}${colors.reset}`);
            console.log();

            // Show only the first error detail (they're the same)
            const firstError = sections[0];
            const errorLines = firstError.split('\n');

            // Skip the platform-specific header line and remove excessive blank lines
            let previousWasBlank = false;
            for (let i = 0; i < errorLines.length; i++) {
              const line = errorLines[i];
              if (i === 0 && line.match(/\.zig\s*-\s*zig error\s+on\s+/)) {
                continue; // Skip platform header
              }

              // Skip multiple consecutive blank lines
              const isBlank = line.trim() === '';
              if (isBlank && previousWasBlank) {
                continue;
              }
              previousWasBlank = isBlank;

              console.log(line); // No indentation
            }
            console.log();
            handled = true;
          }
        }
      }

      // Normal processing for other annotations
      if (!handled) {
        // For merged annotations, skip the duplicate headers within the content
        const isMerged = annotation.merged || (annotation.platformCount && annotation.platformCount > 1);

        // Process lines, removing excessive blank lines
        let previousWasBlank = false;
        text.split('\n').forEach((line, index) => {
          // For merged annotations, skip duplicate platform headers
          if (isMerged && index > 0 && line.match(/^[^\s\/]+\.(ts|js|tsx|jsx|zig)\s*-\s*\d+\s+(failing|errors?|warnings?)\s+on\s+/)) {
            return; // Skip duplicate headers in merged content
          }

          // Skip multiple consecutive blank lines
          const isBlank = line.trim() === '';
          if (isBlank && previousWasBlank) {
            return;
          }
          previousWasBlank = isBlank;

          console.log(line); // No indentation
        });
        console.log();
      }
    }
  } catch (e) {
    console.error("Failed to parse annotations:", e);
    console.log("\nView detailed results at:");
    console.log(`  https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
  }
} else {
  console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
  console.log("View detailed results at:");
  console.log(`  https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
scripts/gamble.ts (Executable file, 63 lines)
@@ -0,0 +1,63 @@
#!/usr/bin/env bun
// usage: bun scripts/gamble.ts <number of attempts> <timeout in seconds> <command>
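// example (hypothetical test path): bun scripts/gamble.ts 20 30 bun test test/js/bun/http/serve.test.ts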

import assert from "node:assert";

const attempts = parseInt(process.argv[2]);
const timeout = parseFloat(process.argv[3]);
const argv = process.argv.slice(4);

let numTimedOut = 0;
const signals = new Map<string, number>();
const codes = new Map<number, number>();
let numOk = 0;

for (let i = 0; i < attempts; i++) {
  const proc = Bun.spawn({
    cmd: argv,
    timeout: 1000 * timeout,
    stdin: null,
    stdout: "ignore",
    stderr: "pipe",
  });
  await proc.exited;
  const errors = await new Response(proc.stderr).text();

  const { signalCode: signal, exitCode } = proc;

  if (signal === "SIGTERM") {
    // sent for timeouts
    numTimedOut += 1;
  } else if (signal) {
    const newCount = 1 + (signals.get(signal) ?? 0);
    signals.set(signal, newCount);
  } else if (exitCode !== 0) {
    // if null there should have been a signal
    assert(exitCode !== null);
    const newCount = 1 + (codes.get(exitCode) ?? 0);
    codes.set(exitCode, newCount);
  } else {
    numOk += 1;
  }
  if (exitCode !== 0) console.log(errors);
  process.stdout.write(exitCode === 0 ? "." : "!");
}
process.stdout.write("\n");

const width = attempts.toString().length;
const pad = (num: number): string => num.toString().padStart(width, " ");
const green = (text: string) => console.log(`\x1b[32m${text}\x1b[0m`);
const red = (text: string) => console.log(`\x1b[31m${text}\x1b[0m`);

green(`${pad(numOk)}/${attempts} OK`);
if (numTimedOut > 0) {
  red(`${pad(numTimedOut)}/${attempts} timeout`);
}
for (const [signal, count] of signals.entries()) {
  red(`${pad(count)}/${attempts} ${signal}`);
}
for (const [code, count] of codes.entries()) {
  red(`${pad(count)}/${attempts} code ${code}`);
}

process.exit(numOk === attempts ? 0 : 1);
scripts/longest.js (Normal file, 125 lines)
@@ -0,0 +1,125 @@
const fs = require("fs");
const path = require("path");

// Regex patterns for different types of top-level declarations
const DECLARATION_PATTERN =
  // pub? (export|extern)? (const|fn|var) name
  /^(pub\s+)?(export\s+|extern\s+)?(const|fn|var)\s+([a-zA-Z_][a-zA-Z0-9_]*)/;
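// e.g. (illustrative) 'pub fn init(allocator: Allocator) Self {' matches with
// name "init", and 'const std = @import("std");' matches with name "std".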

function findDeclarations(filePath) {
  const content = fs.readFileSync(filePath, "utf8");
  const lines = content.split("\n");
  const declarations = [];

  // First pass: collect all declarations with their line numbers
  for (let lineNum = 0; lineNum < lines.length; lineNum++) {
    const line = lines[lineNum];

    // Skip empty lines and comments
    if (!line || line.trim().startsWith("//") || line.trim().startsWith("///")) {
      continue;
    }

    // Only process top-level declarations (no indentation)
    if (line.startsWith(" ") || line.startsWith("\t")) {
      continue;
    }

    const trimmedLine = line.trim();

    // Check each pattern
    const match = trimmedLine.match(DECLARATION_PATTERN);
    if (match) {
      // Extract the name from the match
      const name = match[match.length - 1]; // Last capture group is the name

      declarations.push({
        name,
        match: match[0],
        line: lineNum + 1,
        type: getDeclarationType(match[0]),
        fullLine: trimmedLine,
        startLine: lineNum,
      });
    }
  }

  // Second pass: calculate sizes based on next declaration's start line
  for (let i = 0; i < declarations.length; i++) {
    const currentDecl = declarations[i];
    const nextDecl = declarations[i + 1];

    if (nextDecl) {
      // Size is from current declaration start to next declaration start
      currentDecl.size = nextDecl.startLine - currentDecl.startLine;
    } else {
      // Last declaration: size is from current declaration start to end of file
      currentDecl.size = lines.length - currentDecl.startLine;
    }
  }

  return declarations;
}

function getDeclarationType(matchText) {
  if (matchText.includes("const")) return "const";
  if (matchText.includes("fn")) return "fn";
  if (matchText.includes("var")) return "var";
  return "unknown";
}

function main() {
  const args = process.argv.slice(2);

  if (args.length === 0) {
    console.error("Usage: bun longest.js <zig-file>");
    console.error("Example: bun longest.js src/walker_skippable.zig");
    process.exit(1);
  }

  const filePath = args[0];

  if (!fs.existsSync(filePath)) {
    console.error(`File not found: ${filePath}`);
    process.exit(1);
  }

  if (!filePath.endsWith(".zig")) {
    console.error("Please provide a .zig file");
    process.exit(1);
  }

  try {
    const declarations = findDeclarations(filePath);

    if (declarations.length === 0) {
      console.log("No top-level declarations found.");
      return;
    }

    console.log(`Found ${declarations.length} top-level declarations in ${filePath}:\n`);

    // Sort by declaration size (smallest first)
    declarations.sort((a, b) => a.size - b.size);

    // Find the longest name for formatting
    const maxNameLength = Math.max(...declarations.map(d => d.match.length));
    const maxTypeLength = Math.max(...declarations.map(d => d.type.length));

    console.log(`${"Name".padEnd(maxNameLength + 2)} ${"Type".padEnd(maxTypeLength + 2)} ${"Num Lines".padEnd(6)}`);
    console.log("-".repeat(maxNameLength + maxTypeLength + 15));

    declarations.forEach(decl => {
      console.log(
        `${decl.match.padEnd(maxNameLength + 2)} ${decl.type.padEnd(maxTypeLength + 2)} ${decl.size.toString().padEnd(6)}`,
      );
    });
  } catch (error) {
    console.error("Error reading file:", error.message);
    process.exit(1);
  }
}

if (require.main === module) {
  main();
}
@@ -230,6 +230,27 @@ function getTestExpectations() {
  return expectations;
}

/**
 * Returns whether we should validate exception checks when running the given test
 * @param {string} test
 * @returns {boolean}
 */
const shouldValidateExceptions = (() => {
  let skipArray;
  return test => {
    if (!skipArray) {
      const path = join(cwd, "test/no-validate-exceptions.txt");
      if (!existsSync(path)) {
        skipArray = [];
      } else {
        skipArray = readFileSync(path, "utf-8")
          .split("\n")
          .filter(line => !line.startsWith("#") && line.length > 0);
      }
    }
    return !(skipArray.includes(test) || skipArray.includes("test/" + test));
  };
})();
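// test/no-validate-exceptions.txt is expected to list test paths, one per
// line, with "#" starting a comment line.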

/**
 * @param {string} testPath
 * @returns {string[]}

@@ -416,16 +437,20 @@ async function runTests() {
    const runWithBunTest =
      title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
    const subcommand = runWithBunTest ? "test" : "run";
    const env = {
      FORCE_COLOR: "0",
      NO_COLOR: "1",
      BUN_DEBUG_QUIET_LOGS: "1",
    };
    if (basename(execPath).includes("asan") && shouldValidateExceptions(testPath)) {
      env.BUN_JSC_validateExceptionChecks = "1";
    }
    await runTest(title, async () => {
      const { ok, error, stdout } = await spawnBun(execPath, {
        cwd: cwd,
        args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
        timeout: getNodeParallelTestTimeout(title),
-       env: {
-         FORCE_COLOR: "0",
-         NO_COLOR: "1",
-         BUN_DEBUG_QUIET_LOGS: "1",
-       },
+       env,
        stdout: chunk => pipeTestStdout(process.stdout, chunk),
        stderr: chunk => pipeTestStdout(process.stderr, chunk),
      });
@@ -953,13 +978,18 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {

  testArgs.push(absPath);

  const env = {
    GITHUB_ACTIONS: "true", // always true so annotations are parsed
  };
  if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
    env.BUN_JSC_validateExceptionChecks = "1";
  }

  const { ok, error, stdout } = await spawnBun(execPath, {
    args: isReallyTest ? testArgs : [...args, absPath],
    cwd: options["cwd"],
    timeout: isReallyTest ? timeout : 30_000,
-   env: {
-     GITHUB_ACTIONS: "true", // always true so annotations are parsed
-   },
+   env,
    stdout: chunk => pipeTestStdout(process.stdout, chunk),
    stderr: chunk => pipeTestStdout(process.stderr, chunk),
  });
scripts/sortImports.ts (Normal file, 394 lines)
@@ -0,0 +1,394 @@
import { readdirSync } from "fs";
import path from "path";

// Parse command line arguments
const args = process.argv.slice(2);

const filePaths = args.filter(arg => !arg.startsWith("-"));
const usage = String.raw`
 __ .__ __
____________________/ |_ _______|__| _____ ______ ____________/ |_ ______
\___ / _ \_ __ \ __\ \___ / |/ \\____ \ / _ \_ __ \ __\/ ___/
/ ( <_> ) | \/| | / /| | Y Y \ |_> > <_> ) | \/| | \___ \
/_____ \____/|__| |__| /_____ \__|__|_| / __/ \____/|__| |__| /____ >
\/ \/ \/|__| \/

Usage: bun scripts/sortImports [options] <files...>

Options:
  --help Show this help message
  --no-include-pub Exclude pub imports from sorting
  --no-remove-unused Don't remove unused imports
  --include-unsorted Process files even if they don't have @sortImports marker

Examples:
  bun scripts/sortImports src
`.slice(1);
if (args.includes("--help")) {
  console.log(usage);
  process.exit(0);
}
if (filePaths.length === 0) {
  console.error(usage);
  process.exit(1);
}

const config = {
  includePub: !args.includes("--no-include-pub"),
  removeUnused: !args.includes("--no-remove-unused"),
  includeUnsorted: args.includes("--include-unsorted"),
};

// Type definitions
type Declaration = {
  index: number;
  key: string;
  value: string;
  segments: string[] | null;
  whole: string;
  last?: string;
  wholepath?: string[];
};

// Parse declarations from the file
function parseDeclarations(
  lines: string[],
  fileContents: string,
): {
  declarations: Map<string, Declaration>;
  unusedLineIndices: number[];
} {
  const declarations = new Map<string, Declaration>();
  const unusedLineIndices: number[] = [];

  // for stability
  const sortedLineKeys = [...lines.keys()].sort((a, b) => (lines[a] < lines[b] ? -1 : lines[a] > lines[b] ? 1 : 0));

  for (const i of sortedLineKeys) {
    const line = lines[i];

    if (line === "// @sortImports") {
      lines[i] = "";
      continue;
    }

    const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/;
    const match = line.match(inlineDeclPattern);

    if (!match) continue;

    const name = match[1];
    const value = match[2];

    // Skip if the previous line has a doc comment
    const prevLine = lines[i - 1] ?? "";
    if (prevLine.startsWith("///")) {
      continue;
    }

    // Skip unused declarations (non-public declarations that appear only once)
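    // Heuristic: if every occurrence of the name in the file is on this very
    // line, nothing else references it, so the declaration is unused.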
    if (config.removeUnused && !line.includes("pub ")) {
      const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const expectedCount = (line.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length;
      const actualCount = (fileContents.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length;
      if (expectedCount === actualCount) {
        // unused decl
        unusedLineIndices.push(i);
        continue;
      }
    }

    if (!config.includePub && line.includes("pub ")) {
      continue;
    }

    declarations.set(name, {
      whole: line,
      index: i,
      key: name,
      value,
      segments: parseSegments(value),
    });
  }

  return { declarations, unusedLineIndices };
}

// Validate if a segment is a valid identifier
function isValidSegment(segment: string): boolean {
  if (segment.startsWith("@import(") || segment === "@This()") {
    return true;
  }
  return segment.match(/^[a-zA-Z0-9_]+$/) != null;
}

// Parse import path segments from a value
function parseSegments(value: string): null | string[] {
  if (value.startsWith("@import(")) {
    const rightBracketIndex = value.indexOf(")");
    if (rightBracketIndex === -1) return null;

    const importPart = value.slice(0, rightBracketIndex + 1);
    const remainingPart = value.slice(rightBracketIndex + 1);

    if (remainingPart.startsWith(".")) {
      const segments = remainingPart.slice(1).split(".");
      if (!segments.every(segment => isValidSegment(segment))) return null;
      return [importPart, ...segments];
    } else if (remainingPart === "") {
      return [importPart];
    } else {
      return null;
    }
  } else {
    const segments = value.split(".");
    if (!segments.every(segment => isValidSegment(segment))) return null;
    return segments;
  }
}
|
||||
|
||||
// Resolve the first segment of an import path
|
||||
function resolveFirstSegment(firstSegment: string, declarations: Map<string, Declaration>): null | string[] {
|
||||
if (firstSegment.startsWith("@import(") || firstSegment.startsWith("@This()")) {
|
||||
return [firstSegment];
|
||||
} else {
|
||||
const declaration = declarations.get(firstSegment);
|
||||
if (!declaration) {
|
||||
return null; // Unknown declaration
|
||||
}
|
||||
|
||||
const subFirstSegment = declaration.segments?.[0];
|
||||
if (!subFirstSegment) {
|
||||
return null; // Invalid declaration
|
||||
}
|
||||
|
||||
const resolvedSubFirst = resolveFirstSegment(subFirstSegment, declarations);
|
||||
if (!resolvedSubFirst) {
|
||||
return null; // Unable to resolve
|
||||
}
|
||||
|
||||
return [...resolvedSubFirst, ...(declaration.segments?.slice(1) ?? [])];
|
||||
}
|
||||
}
|
||||
|
||||
type Group = {
|
||||
keySegments: string[];
|
||||
declarations: Declaration[];
|
||||
};
|
||||
|
||||
// Group declarations by their import paths
|
||||
function groupDeclarationsByImportPath(declarations: Map<string, Declaration>): Map<string, Group> {
|
||||
const groups = new Map<string, Group>();
|
||||
|
||||
for (const declaration of declarations.values()) {
|
||||
if (!declaration.segments || declaration.segments.length < 1) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const firstSegment = declaration.segments[0];
|
||||
const resolvedFirst = resolveFirstSegment(firstSegment, declarations);
|
||||
|
||||
if (!resolvedFirst) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const remainingSegments = declaration.segments.slice(1);
|
||||
const fullPath = [...resolvedFirst, ...remainingSegments];
|
||||
const lastSegment = fullPath.pop();
|
||||
|
||||
if (!lastSegment) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const groupKey = fullPath.join(".");
|
||||
if (!groups.has(groupKey)) {
|
||||
groups.set(groupKey, { keySegments: fullPath, declarations: [] });
|
||||
}
|
||||
|
||||
groups.get(groupKey)!.declarations.push(declaration);
|
||||
declaration.last = lastSegment;
|
||||
declaration.wholepath = [...fullPath, lastSegment];
|
||||
}
|
||||
|
||||
return groups;
|
||||
}
|
||||
|
||||
// Merge single-item groups into their parent groups
|
||||
function mergeSingleItemGroups(groups: Map<string, Group>): void {
|
||||
while (true) {
|
||||
let hasChanges = false;
|
||||
|
||||
for (const [groupKey, group] of groups.entries()) {
|
||||
if (group.declarations.length === 1) {
|
||||
const gcsplit = [...group.keySegments];
|
||||
while (gcsplit.pop()) {
|
||||
const parentKey = gcsplit.join(".");
|
||||
if (groups.has(parentKey)) {
|
||||
groups.get(parentKey)!.declarations.push(group.declarations[0]);
|
||||
groups.delete(groupKey);
|
||||
hasChanges = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!hasChanges) break;
|
||||
}
|
||||
}
|
||||
|
||||
// Move items with child groups to the top of those child groups
|
||||
function promoteItemsWithChildGroups(groups: Map<string, Group>): void {
|
||||
for (const [groupKey, group] of groups.entries()) {
|
||||
for (let i = 0; i < group.declarations.length; ) {
|
||||
const item = group.declarations[i];
|
||||
const childGroupKey = (groupKey ? groupKey + "." : "") + item.last;
|
||||
|
||||
if (groups.has(childGroupKey)) {
|
||||
groups.get(childGroupKey)!.declarations.unshift(item);
|
||||
group.declarations.splice(i, 1);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort groups and their declarations
|
||||
function sortGroupsAndDeclarations(groups: Map<string, Group>): string[] {
|
||||
// Sort declarations within each group
|
||||
for (const group of groups.values()) {
|
||||
group.declarations.sort((a, b) => {
|
||||
if (a.wholepath?.length !== b.wholepath?.length) {
|
||||
return (a.wholepath?.length ?? 0) - (b.wholepath?.length ?? 0);
|
||||
}
|
||||
return a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
|
||||
});
|
||||
}
|
||||
|
||||
// Sort group keys alphabetically
|
||||
return Array.from(groups.keys()).sort((a, b) => {
|
||||
return a < b ? -1 : a > b ? 1 : 0;
|
||||
});
|
||||
}
|
||||
|
||||
// Generate the sorted output
|
||||
function generateSortedOutput(lines: string[], groups: Map<string, Group>, sortedGroupKeys: string[]): string[] {
|
||||
const outputLines = [...lines];
|
||||
outputLines.push("");
|
||||
outputLines.push("// @sortImports");
|
||||
|
||||
for (const groupKey of sortedGroupKeys) {
|
||||
const groupDeclarations = groups.get(groupKey)!;
|
||||
if (!groupDeclarations?.declarations.length) continue;
|
||||
|
||||
// Add spacing between groups
|
||||
outputLines.push("");
|
||||
|
||||
// Add declarations to output and mark original lines for removal
|
||||
for (const declaration of groupDeclarations.declarations) {
|
||||
outputLines.push(declaration.whole);
|
||||
outputLines[declaration.index] = "";
|
||||
}
|
||||
}
|
||||
|
||||
return outputLines;
|
||||
}
|
||||
|
||||
// Main execution function for a single file
|
||||
async function processFile(filePath: string): Promise<void> {
|
||||
const originalFileContents = await Bun.file(filePath).text();
|
||||
let fileContents = originalFileContents;
|
||||
|
||||
if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) {
|
||||
return;
|
||||
}
|
||||
console.log(`Processing: ${filePath}`);
|
||||
|
||||
let needsRecurse = true;
|
||||
while (needsRecurse) {
|
||||
needsRecurse = false;
|
||||
|
||||
const lines = fileContents.split("\n");
|
||||
|
||||
const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents);
|
||||
const groups = groupDeclarationsByImportPath(declarations);
|
||||
|
||||
promoteItemsWithChildGroups(groups);
|
||||
mergeSingleItemGroups(groups);
|
||||
const sortedGroupKeys = sortGroupsAndDeclarations(groups);
|
||||
|
||||
const sortedLines = generateSortedOutput(lines, groups, sortedGroupKeys);
|
||||
|
||||
// Remove unused declarations
|
||||
if (config.removeUnused) {
|
||||
for (const line of unusedLineIndices) {
|
||||
sortedLines[line] = "";
|
||||
needsRecurse = true;
|
||||
}
|
||||
}
|
||||
fileContents = sortedLines.join("\n");
|
||||
}
|
||||
|
||||
// Remove any leading newlines
|
||||
fileContents = fileContents.replace(/^\n+/, "");
|
||||
|
||||
// Maximum of one empty line
|
||||
fileContents = fileContents.replace(/\n\n+/g, "\n\n");
|
||||
|
||||
// Ensure exactly one trailing newline
|
||||
fileContents = fileContents.replace(/\s*$/, "\n");
|
||||
|
||||
// If the file is empty, remove the trailing newline
|
||||
if (fileContents === "\n") fileContents = "";
|
||||
|
||||
if (fileContents === originalFileContents) {
|
||||
console.log(`✓ No changes: ${filePath}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Write the sorted file
|
||||
await Bun.write(filePath, fileContents);
|
||||
|
||||
console.log(`✓ Done: ${filePath}`);
|
||||
}
|
||||
|
||||
// Process all files
|
||||
async function main() {
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
for (const filePath of filePaths) {
|
||||
const stat = await Bun.file(filePath).stat();
|
||||
if (stat.isDirectory()) {
|
||||
const files = readdirSync(filePath, { recursive: true });
|
||||
for (const file of files) {
|
||||
if (typeof file !== "string" || !file.endsWith(".zig")) continue;
|
||||
try {
|
||||
await processFile(path.join(filePath, file));
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
errorCount++;
|
||||
console.error(`Failed to process ${filePath}`);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
await processFile(filePath);
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
errorCount++;
|
||||
console.error(`Failed to process ${filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nSummary: ${successCount} files processed successfully, ${errorCount} errors`);
|
||||
|
||||
if (errorCount > 0) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
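The grouping logic above keys every declaration by the dotted path it resolves to, so related imports end up adjacent. As a minimal, hypothetical illustration (not part of this PR), the following standalone TypeScript mirrors what parseSegments computes for a few typical Zig declaration values:

// sketch.ts — run with `bun sketch.ts`; reimplements the segment split for illustration only
function segmentsOf(value: string): string[] | null {
  if (value.startsWith("@import(")) {
    const close = value.indexOf(")");
    if (close === -1) return null;
    const importPart = value.slice(0, close + 1);
    const rest = value.slice(close + 1);
    if (rest === "") return [importPart];
    if (!rest.startsWith(".")) return null;
    return [importPart, ...rest.slice(1).split(".")];
  }
  return value.split(".");
}

console.log(segmentsOf('@import("bun")'));        // [ '@import("bun")' ]
console.log(segmentsOf('@import("bun").js_ast')); // [ '@import("bun")', 'js_ast' ]
console.log(segmentsOf("js_ast.Expr"));           // [ 'js_ast', 'Expr' ]
// The group key is every segment except the last, so js_ast.Expr and
// js_ast.Stmt share a group and are emitted together under // @sortImports.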
@@ -219,6 +219,8 @@ comptime {
}

pub export fn Bun__onExit() void {
    bun.JSC.Node.FSEvents.closeAndWait();

    runExitCallbacks();
    Output.flush();
    std.mem.doNotOptimizeAway(&Bun__atexit);

@@ -21,6 +21,7 @@ side: ?bun.bake.Side,
/// entrypoint like sourcemaps and bytecode
entry_point_index: ?u32,
referenced_css_files: []const Index = &.{},
source_index: Index.Optional = .none,

pub const Index = bun.GenericIndex(u32, OutputFile);

@@ -62,11 +63,19 @@ pub const FileOperation = struct {
    }
};

pub const Kind = @typeInfo(Value).Union.tag_type.?;
pub const Kind = enum {
    move,
    copy,
    noop,
    buffer,
    pending,
    saved,
};

// TODO: document how and why all variants of this union(enum) are used,
// specifically .move and .copy; the new bundler has to load files in memory
// in order to hash them, so i think it uses .buffer for those
pub const Value = union(enum) {
pub const Value = union(Kind) {
    move: FileOperation,
    copy: FileOperation,
    noop: u0,
@@ -177,6 +186,7 @@ pub const Options = struct {
source_map_index: ?u32 = null,
bytecode_index: ?u32 = null,
output_path: string,
source_index: Index.Optional = .none,
size: ?usize = null,
input_path: []const u8 = "",
display_size: u32 = 0,
@@ -205,6 +215,7 @@ pub fn init(options: Options) OutputFile {
.input_loader = options.input_loader,
.src_path = Fs.Path.init(options.input_path),
.dest_path = options.output_path,
.source_index = options.source_index,
.size = options.size orelse switch (options.data) {
    .buffer => |buf| buf.data.len,
    .file => |file| file.size,
@@ -310,7 +321,7 @@ pub fn toJS(
) bun.JSC.JSValue {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => JSC.JSValue.undefined,
        .noop => .js_undefined,
        .copy => |copy| brk: {
            const file_blob = JSC.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())

@@ -97,6 +97,12 @@ pub const StandaloneModuleGraph = struct {
encoding: Encoding = .latin1,
loader: bun.options.Loader = .file,
module_format: ModuleFormat = .none,
side: FileSide = .server,
};

pub const FileSide = enum(u8) {
    server = 0,
    client = 1,
};

pub const Encoding = enum(u8) {
@@ -141,6 +147,11 @@ pub const StandaloneModuleGraph = struct {
wtf_string: bun.String = bun.String.empty,
bytecode: []u8 = "",
module_format: ModuleFormat = .none,
side: FileSide = .server,

pub fn appearsInEmbeddedFilesArray(this: *const File) bool {
    return this.side == .client or !this.loader.isJavaScriptLike();
}

pub fn stat(this: *const File) bun.Stat {
    var result = std.mem.zeroes(bun.Stat);
@@ -300,6 +311,7 @@ pub const StandaloneModuleGraph = struct {
.none,
.bytecode = if (module.bytecode.length > 0) @constCast(sliceTo(raw_bytes, module.bytecode)) else &.{},
.module_format = module.module_format,
.side = module.side,
},
);
}
@@ -347,8 +359,10 @@ pub const StandaloneModuleGraph = struct {
string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256;
} else {
if (entry_point_id == null) {
if (output_file.output_kind == .@"entry-point") {
entry_point_id = module_count;
if (output_file.side == null or output_file.side.? == .server) {
if (output_file.output_kind == .@"entry-point") {
entry_point_id = module_count;
}
}
}

@@ -421,6 +435,10 @@ pub const StandaloneModuleGraph = struct {
else => .none,
} else .none,
.bytecode = bytecode,
.side = switch (output_file.side orelse .server) {
    .server => .server,
    .client => .client,
},
};

if (output_file.source_map_index != std.math.maxInt(u32)) {
@@ -839,7 +857,7 @@ pub const StandaloneModuleGraph = struct {
.fromStdDir(root_dir),
bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0),
) catch |err| {
if (err == error.IsDir) {
if (err == error.IsDir or err == error.EISDIR) {
    Output.prettyErrorln("<r><red>error<r><d>:<r> {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)});
} else {
    Output.prettyErrorln("<r><red>error<r><d>:<r> failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) });

@@ -46,7 +46,7 @@ pub const HashType = u32;
const no_watch_item: WatchItemIndex = std.math.maxInt(WatchItemIndex);

/// Initializes a watcher. Each watcher is tied to some context type, which
/// recieves watch callbacks on the watcher thread. This function does not
/// receives watch callbacks on the watcher thread. This function does not
/// actually start the watcher thread.
///
///     const watcher = try Watcher.init(T, instance_of_t, fs, bun.default_allocator)

@@ -2,7 +2,7 @@
//! It also allows measuring how much memory a scope has allocated.
const AllocationScope = @This();

pub const enabled = bun.Environment.isDebug;
pub const enabled = bun.Environment.enableAllocScopes;

parent: Allocator,
state: if (enabled) struct {
@@ -196,7 +196,7 @@ pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void {
scope.state.mutex.lock();
defer scope.state.mutex.unlock();
if (scope.state.allocations.getPtr(cast_ptr)) |owned| {
    Output.debugWarn("Pointer allocated here:");
    Output.warn("Pointer allocated here:");
    bun.crash_handler.dumpStackTrace(owned.allocated_at.trace(), trace_limits, trace_limits);
}
@panic("this pointer was owned by the allocation scope when it was not supposed to be");
@@ -214,8 +214,22 @@ pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_add

/// Call when the pointer from `trackExternalAllocation` is freed.
/// Returns true if the free was invalid.
pub fn trackExternalFree(scope: *AllocationScope, ptr: []const u8, ret_addr: ?usize) bool {
pub fn trackExternalFree(scope: *AllocationScope, slice: anytype, ret_addr: ?usize) bool {
    if (!enabled) return false;
    const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) {
        .pointer => |p| switch (p.size) {
            .slice => brk: {
                if (p.child != u8) @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice)));
                if (p.sentinel_ptr == null) break :brk slice;
                // Ensure we include the sentinel value
                break :brk slice[0 .. slice.len + 1];
            },
            else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))),
        },
        else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))),
    };
    // Empty slice usually means invalid pointer
    if (ptr.len == 0) return false;
    scope.state.mutex.lock();
    defer scope.state.mutex.unlock();
    return trackFreeAssumeLocked(scope, ptr, ret_addr orelse @returnAddress());

@@ -1612,6 +1612,23 @@ pub const Api = struct {
    }
};

pub const UnhandledRejections = enum(u8) {
    strict = 0,
    throw = 1,
    warn = 2,
    none = 3,
    warn_with_error_code = 4,
    bun = 5,

    pub const map = bun.ComptimeStringMap(UnhandledRejections, .{
        .{ "strict", .strict },
        .{ "throw", .throw },
        .{ "warn", .warn },
        .{ "none", .none },
        .{ "warn-with-error-code", .warn_with_error_code },
    });
};

pub const TransformOptions = struct {
    /// jsx
    jsx: ?Jsx = null,
@@ -1709,6 +1726,8 @@ pub const Api = struct {

// from --no-addons. null == true
allow_addons: ?bool = null,
/// from --unhandled-rejections, default is 'bun'
unhandled_rejections: ?UnhandledRejections = null,

bunfig_path: []const u8,

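Each enum value above corresponds to one accepted spelling of the CLI flag, with "bun" as the default per the option's doc comment. A minimal, hypothetical usage sketch, assuming the flag follows Node.js-style semantics (behavior assumed, not verified against this PR):

// rejections.ts — under `bun --unhandled-rejections=strict rejections.ts`,
// an unhandled rejection is expected to surface as an uncaught error,
// while `warn` should only print a warning and continue.
Promise.reject(new Error("boom"));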
@@ -3002,6 +3021,8 @@ pub const Api = struct {

ignore_scripts: ?bool = null,

link_workspace_packages: ?bool = null,

pub fn decode(reader: anytype) anyerror!BunInstall {
    var this = std.mem.zeroes(BunInstall);

96
src/ast/ASTMemoryAllocator.zig
Normal file
@@ -0,0 +1,96 @@
const SFA = std.heap.StackFallbackAllocator(@min(8192, std.heap.page_size_min));

stack_allocator: SFA = undefined,
bump_allocator: std.mem.Allocator = undefined,
allocator: std.mem.Allocator,
previous: ?*ASTMemoryAllocator = null,

pub fn enter(this: *ASTMemoryAllocator, allocator: std.mem.Allocator) ASTMemoryAllocator.Scope {
    this.allocator = allocator;
    this.stack_allocator = SFA{
        .buffer = undefined,
        .fallback_allocator = allocator,
        .fixed_buffer_allocator = undefined,
    };
    this.bump_allocator = this.stack_allocator.get();
    this.previous = null;
    var ast_scope = ASTMemoryAllocator.Scope{
        .current = this,
        .previous = Stmt.Data.Store.memory_allocator,
    };
    ast_scope.enter();
    return ast_scope;
}
pub const Scope = struct {
    current: ?*ASTMemoryAllocator = null,
    previous: ?*ASTMemoryAllocator = null,

    pub fn enter(this: *@This()) void {
        bun.debugAssert(Expr.Data.Store.memory_allocator == Stmt.Data.Store.memory_allocator);

        this.previous = Expr.Data.Store.memory_allocator;

        const current = this.current;

        Expr.Data.Store.memory_allocator = current;
        Stmt.Data.Store.memory_allocator = current;

        if (current == null) {
            Stmt.Data.Store.begin();
            Expr.Data.Store.begin();
        }
    }

    pub fn exit(this: *const @This()) void {
        Expr.Data.Store.memory_allocator = this.previous;
        Stmt.Data.Store.memory_allocator = this.previous;
    }
};

pub fn reset(this: *ASTMemoryAllocator) void {
    this.stack_allocator = SFA{
        .buffer = undefined,
        .fallback_allocator = this.allocator,
        .fixed_buffer_allocator = undefined,
    };
    this.bump_allocator = this.stack_allocator.get();
}

pub fn push(this: *ASTMemoryAllocator) void {
    Stmt.Data.Store.memory_allocator = this;
    Expr.Data.Store.memory_allocator = this;
}

pub fn pop(this: *ASTMemoryAllocator) void {
    const prev = this.previous;
    bun.assert(prev != this);
    Stmt.Data.Store.memory_allocator = prev;
    Expr.Data.Store.memory_allocator = prev;
    this.previous = null;
}

pub fn append(this: ASTMemoryAllocator, comptime ValueType: type, value: anytype) *ValueType {
    const ptr = this.bump_allocator.create(ValueType) catch unreachable;
    ptr.* = value;
    return ptr;
}

/// Initialize ASTMemoryAllocator as `undefined`, and call this.
pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) void {
    this.stack_allocator = SFA{
        .buffer = undefined,
        .fallback_allocator = arena,
        .fixed_buffer_allocator = .init(&.{}),
    };
    this.bump_allocator = this.stack_allocator.get();
}

// @sortImports

const bun = @import("bun");
const std = @import("std");

const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const Expr = js_ast.Expr;
const Stmt = js_ast.Stmt;
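The Scope type here is a save/restore guard: enter() stashes the store's current allocator pointer and installs a new one, and exit() puts the old pointer back, so nested parses can temporarily redirect AST allocations. A rough TypeScript analogue of that pattern (hypothetical, for illustration only — the real code swaps Zig allocator pointers on Expr/Stmt stores):

// A generic scoped-override guard mirroring Scope.enter()/exit().
let currentAllocator: object | null = null;

function enterScope(next: object | null): { exit(): void } {
  const previous = currentAllocator; // saved, like Scope.previous
  currentAllocator = next;           // installed, like Scope.current
  return {
    exit() {
      currentAllocator = previous;   // restored, like Scope.exit()
    },
  };
}

const scope = enterScope({ name: "ast-arena" });
try {
  // ... allocate AST nodes against currentAllocator here ...
} finally {
  scope.exit(); // callers pair enter() with exit(), as the returned Scope implies
}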
143
src/ast/Ast.zig
Normal file
@@ -0,0 +1,143 @@
pub const TopLevelSymbolToParts = std.ArrayHashMapUnmanaged(Ref, BabyList(u32), Ref.ArrayHashCtx, false);

approximate_newline_count: usize = 0,
has_lazy_export: bool = false,
runtime_imports: Runtime.Imports = .{},

nested_scope_slot_counts: SlotCounts = SlotCounts{},

runtime_import_record_id: ?u32 = null,
needs_runtime: bool = false,
// This is a list of CommonJS features. When a file uses CommonJS features,
// it's not a candidate for "flat bundling" and must be wrapped in its own
// closure.
has_top_level_return: bool = false,
uses_exports_ref: bool = false,
uses_module_ref: bool = false,
uses_require_ref: bool = false,
commonjs_module_exports_assigned_deoptimized: bool = false,

force_cjs_to_esm: bool = false,
exports_kind: ExportsKind = ExportsKind.none,

// This is a list of ES6 features. They are ranges instead of booleans so
// that they can be used in log messages. Check to see if "Len > 0".
import_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax or "import()"
export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax
top_level_await_keyword: logger.Range = logger.Range.None,

/// These are stored at the AST level instead of on individual AST nodes so
/// they can be manipulated efficiently without a full AST traversal
import_records: ImportRecord.List = .{},

hashbang: string = "",
directive: ?string = null,
parts: Part.List = Part.List{},
// This list may be mutated later, so we should store the capacity
symbols: Symbol.List = Symbol.List{},
module_scope: Scope = Scope{},
char_freq: ?CharFreq = null,
exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
/// When using format .bake_internal_dev, this is the HMR variable instead
/// of the wrapper. This is because that format does not store module
/// wrappers in a variable.
wrapper_ref: Ref = Ref.None,
require_ref: Ref = Ref.None,

// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
named_imports: NamedImports = .{},
named_exports: NamedExports = .{},
export_star_import_records: []u32 = &([_]u32{}),

// allocator: std.mem.Allocator,
top_level_symbols_to_parts: TopLevelSymbolToParts = .{},

commonjs_named_exports: CommonJSNamedExports = .{},

redirect_import_record_index: ?u32 = null,

/// Only populated when bundling
target: bun.options.Target = .browser,
// const_values: ConstValuesMap = .{},
ts_enums: TsEnumsMap = .{},

/// Not to be confused with `commonjs_named_exports`
/// This is a list of named exports that may exist in a CommonJS module
/// We use this with `commonjs_at_runtime` to re-export CommonJS
has_commonjs_export_names: bool = false,
import_meta_ref: Ref = Ref.None,

pub const CommonJSNamedExport = struct {
    loc_ref: LocRef,
    needs_decl: bool = true,
};
pub const CommonJSNamedExports = bun.StringArrayHashMapUnmanaged(CommonJSNamedExport);

pub const NamedImports = std.ArrayHashMapUnmanaged(Ref, NamedImport, RefHashCtx, true);
pub const NamedExports = bun.StringArrayHashMapUnmanaged(NamedExport);
pub const ConstValuesMap = std.ArrayHashMapUnmanaged(Ref, Expr, RefHashCtx, false);
pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(InlinedEnumValue), RefHashCtx, false);

pub fn fromParts(parts: []Part) Ast {
    return Ast{
        .parts = Part.List.init(parts),
        .runtime_imports = .{},
    };
}

pub fn initTest(parts: []Part) Ast {
    return Ast{
        .parts = Part.List.init(parts),
        .runtime_imports = .{},
    };
}

pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{} };

pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
    const opts = std.json.StringifyOptions{ .whitespace = std.json.StringifyOptions.Whitespace{
        .separator = true,
    } };
    try std.json.stringify(self.parts, opts, stream);
}

/// Do not call this if it wasn't globally allocated!
pub fn deinit(this: *Ast) void {
    // TODO: assert mimalloc-owned memory
    if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
    if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
    if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}

// @sortImports

const std = @import("std");
const Runtime = @import("../runtime.zig").Runtime;

const bun = @import("bun");
const BabyList = bun.BabyList;
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.js_ast;
const Ast = js_ast.Ast;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Expr = js_ast.Expr;
const InlinedEnumValue = js_ast.InlinedEnumValue;
const LocRef = js_ast.LocRef;
const NamedExport = js_ast.NamedExport;
const NamedImport = js_ast.NamedImport;
const Part = js_ast.Part;
const Ref = js_ast.Ref;
const RefHashCtx = js_ast.RefHashCtx;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;

const G = js_ast.G;
pub const Class = G.Class;
106
src/ast/B.zig
Normal file
@@ -0,0 +1,106 @@
/// B is for Binding! Bindings are on the left side of variable
/// declarations (s_local), which is how destructuring assignments
/// are represented in memory. Consider a basic example.
///
///     let hello = world;
///         ^       ^
///         |       E.Identifier
///         B.Identifier
///
/// Bindings can be nested
///
///                B.Array
///                | B.Identifier
///                | |
///     let { foo: [ bar ] } = ...
///         ----------------
///              B.Object
pub const B = union(Binding.Tag) {
    // let x = ...
    b_identifier: *B.Identifier,
    // let [a, b] = ...
    b_array: *B.Array,
    // let { a, b: c } = ...
    b_object: *B.Object,
    // this is used to represent array holes
    b_missing: B.Missing,

    pub const Identifier = struct {
        ref: Ref,
    };

    pub const Property = struct {
        flags: Flags.Property.Set = Flags.Property.None,
        key: ExprNodeIndex,
        value: Binding,
        default_value: ?Expr = null,
    };

    pub const Object = struct {
        properties: []B.Property,
        is_single_line: bool = false,

        pub const Property = B.Property;
    };

    pub const Array = struct {
        items: []ArrayBinding,
        has_spread: bool = false,
        is_single_line: bool = false,

        pub const Item = ArrayBinding;
    };

    pub const Missing = struct {};

    /// This hash function is currently only used for React Fast Refresh transform.
    /// This doesn't include the `is_single_line` properties, as they only affect whitespace.
    pub fn writeToHasher(b: B, hasher: anytype, symbol_table: anytype) void {
        switch (b) {
            .b_identifier => |id| {
                const original_name = id.ref.getSymbol(symbol_table).original_name;
                writeAnyToHasher(hasher, .{ std.meta.activeTag(b), original_name.len });
            },
            .b_array => |array| {
                writeAnyToHasher(hasher, .{ std.meta.activeTag(b), array.has_spread, array.items.len });
                for (array.items) |item| {
                    writeAnyToHasher(hasher, .{item.default_value != null});
                    if (item.default_value) |default| {
                        default.data.writeToHasher(hasher, symbol_table);
                    }
                    item.binding.data.writeToHasher(hasher, symbol_table);
                }
            },
            .b_object => |object| {
                writeAnyToHasher(hasher, .{ std.meta.activeTag(b), object.properties.len });
                for (object.properties) |property| {
                    writeAnyToHasher(hasher, .{ property.default_value != null, property.flags });
                    if (property.default_value) |default| {
                        default.data.writeToHasher(hasher, symbol_table);
                    }
                    property.key.data.writeToHasher(hasher, symbol_table);
                    property.value.data.writeToHasher(hasher, symbol_table);
                }
            },
            .b_missing => {},
        }
    }
};

// @sortImports

const std = @import("std");

const bun = @import("bun");
const writeAnyToHasher = bun.writeAnyToHasher;

const js_ast = bun.js_ast;
const ArrayBinding = js_ast.ArrayBinding;
const Binding = js_ast.Binding;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const Flags = js_ast.Flags;
const Ref = js_ast.Ref;

const G = js_ast.G;
pub const Class = G.Class;
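writeToHasher feeds the binding's shape (tag, lengths, flags) plus nested keys and defaults into a hasher, which is how React Fast Refresh detects that a destructuring pattern changed without being distracted by whitespace. A simplified TypeScript sketch of the same idea (hypothetical, illustration only):

// Structural "hash" of a binding-like tree: record the tag and arity at each
// node, then recurse, so renames and reshapes change the result but
// formatting does not.
type Node =
  | { kind: "id"; name: string }
  | { kind: "array"; items: Node[] }
  | { kind: "object"; props: { key: string; value: Node }[] };

function hashNode(node: Node, parts: string[] = []): string {
  switch (node.kind) {
    case "id":
      parts.push(`id:${node.name.length}`); // like hashing original_name.len
      break;
    case "array":
      parts.push(`array:${node.items.length}`);
      node.items.forEach(item => hashNode(item, parts));
      break;
    case "object":
      parts.push(`object:${node.props.length}`);
      node.props.forEach(p => {
        parts.push(`key:${p.key}`);
        hashNode(p.value, parts);
      });
      break;
  }
  return parts.join("|");
}

// `let { foo: [bar] } = ...` produces a stable structural fingerprint:
const pattern: Node = { kind: "object", props: [{ key: "foo", value: { kind: "array", items: [{ kind: "id", name: "bar" }] } }] };
console.log(hashNode(pattern)); // object:1|key:foo|array:1|id:3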
165
src/ast/Binding.zig
Normal file
@@ -0,0 +1,165 @@
loc: logger.Loc,
data: B,

const Serializable = struct {
    type: Tag,
    object: string,
    value: B,
    loc: logger.Loc,
};

pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
    return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "binding", .value = self.data, .loc = self.loc });
}

pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type {
    const ExprType = expr_type;
    return struct {
        context: *ExprType,
        allocator: std.mem.Allocator,
        pub const Context = @This();

        pub fn wrapIdentifier(ctx: *const Context, loc: logger.Loc, ref: Ref) Expr {
            return func_type(ctx.context, loc, ref);
        }

        pub fn init(context: *ExprType) Context {
            return Context{ .context = context, .allocator = context.allocator };
        }
    };
}

pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
    const loc = binding.loc;

    switch (binding.data) {
        .b_missing => {
            return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = loc };
        },
        .b_identifier => |b| {
            return wrapper.wrapIdentifier(loc, b.ref);
        },
        .b_array => |b| {
            var exprs = wrapper.allocator.alloc(Expr, b.items.len) catch unreachable;
            var i: usize = 0;
            while (i < exprs.len) : (i += 1) {
                const item = b.items[i];
                exprs[i] = convert: {
                    const expr = toExpr(&item.binding, wrapper);
                    if (b.has_spread and i == exprs.len - 1) {
                        break :convert Expr.init(E.Spread, E.Spread{ .value = expr }, expr.loc);
                    } else if (item.default_value) |default| {
                        break :convert Expr.assign(expr, default);
                    } else {
                        break :convert expr;
                    }
                };
            }

            return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc);
        },
        .b_object => |b| {
            const properties = wrapper
                .allocator
                .alloc(G.Property, b.properties.len) catch unreachable;
            for (properties, b.properties) |*property, item| {
                property.* = .{
                    .flags = item.flags,
                    .key = item.key,
                    .kind = if (item.flags.contains(.is_spread))
                        .spread
                    else
                        .normal,
                    .value = toExpr(&item.value, wrapper),
                    .initializer = item.default_value,
                };
            }
            return Expr.init(
                E.Object,
                E.Object{
                    .properties = G.Property.List.init(properties),
                    .is_single_line = b.is_single_line,
                },
                loc,
            );
        },
    }
}

pub const Tag = enum(u5) {
    b_identifier,
    b_array,
    b_object,
    b_missing,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }
};

pub var icount: usize = 0;

pub fn init(t: anytype, loc: logger.Loc) Binding {
    icount += 1;
    switch (@TypeOf(t)) {
        *B.Identifier => {
            return Binding{ .loc = loc, .data = B{ .b_identifier = t } };
        },
        *B.Array => {
            return Binding{ .loc = loc, .data = B{ .b_array = t } };
        },
        *B.Object => {
            return Binding{ .loc = loc, .data = B{ .b_object = t } };
        },
        B.Missing => {
            return Binding{ .loc = loc, .data = B{ .b_missing = t } };
        },
        else => {
            @compileError("Invalid type passed to Binding.init");
        },
    }
}

pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding {
    icount += 1;
    switch (@TypeOf(t)) {
        B.Identifier => {
            const data = allocator.create(B.Identifier) catch unreachable;
            data.* = t;
            return Binding{ .loc = loc, .data = B{ .b_identifier = data } };
        },
        B.Array => {
            const data = allocator.create(B.Array) catch unreachable;
            data.* = t;
            return Binding{ .loc = loc, .data = B{ .b_array = data } };
        },
        B.Object => {
            const data = allocator.create(B.Object) catch unreachable;
            data.* = t;
            return Binding{ .loc = loc, .data = B{ .b_object = data } };
        },
        B.Missing => {
            return Binding{ .loc = loc, .data = B{ .b_missing = .{} } };
        },
        else => {
            @compileError("Invalid type passed to Binding.alloc");
        },
    }
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.js_ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const G = js_ast.G;
const Ref = js_ast.Ref;
231
src/ast/BundledAst.zig
Normal file
@@ -0,0 +1,231 @@
//! Like Ast but slimmer and for bundling only.
//!
//! On Linux, the hottest function in the bundler is:
//! src.multi_array_list.MultiArrayList(src.js_ast.Ast).ensureTotalCapacity
//! https://share.firefox.dev/3NNlRKt
//!
//! So we make a slimmer version of Ast for bundling that doesn't allocate as much memory

approximate_newline_count: u32 = 0,
nested_scope_slot_counts: SlotCounts = .{},

exports_kind: ExportsKind = .none,

/// These are stored at the AST level instead of on individual AST nodes so
/// they can be manipulated efficiently without a full AST traversal
import_records: ImportRecord.List = .{},

hashbang: string = "",
parts: Part.List = .{},
css: ?*bun.css.BundlerStyleSheet = null,
url_for_css: []const u8 = "",
symbols: Symbol.List = .{},
module_scope: Scope = .{},
char_freq: CharFreq = undefined,
exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
wrapper_ref: Ref = Ref.None,
require_ref: Ref = Ref.None,
top_level_await_keyword: logger.Range,
tla_check: TlaCheck = .{},

// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
named_imports: NamedImports = .{},
named_exports: NamedExports = .{},
export_star_import_records: []u32 = &.{},

top_level_symbols_to_parts: TopLevelSymbolToParts = .{},

commonjs_named_exports: CommonJSNamedExports = .{},

redirect_import_record_index: u32 = std.math.maxInt(u32),

/// Only populated when bundling. When --server-components is passed, this
/// will be .browser when it is a client component, and the server's target
/// on the server.
target: bun.options.Target = .browser,

// const_values: ConstValuesMap = .{},
ts_enums: Ast.TsEnumsMap = .{},

flags: BundledAst.Flags = .{},

pub const Flags = packed struct(u8) {
    // This is a list of CommonJS features. When a file uses CommonJS features,
    // it's not a candidate for "flat bundling" and must be wrapped in its own
    // closure.
    uses_exports_ref: bool = false,
    uses_module_ref: bool = false,
    // uses_require_ref: bool = false,
    uses_export_keyword: bool = false,
    has_char_freq: bool = false,
    force_cjs_to_esm: bool = false,
    has_lazy_export: bool = false,
    commonjs_module_exports_assigned_deoptimized: bool = false,
    has_explicit_use_strict_directive: bool = false,
};

pub const empty = BundledAst.init(Ast.empty);

pub fn toAST(this: *const BundledAst) Ast {
    return .{
        .approximate_newline_count = this.approximate_newline_count,
        .nested_scope_slot_counts = this.nested_scope_slot_counts,

        .exports_kind = this.exports_kind,

        .import_records = this.import_records,

        .hashbang = this.hashbang,
        .parts = this.parts,
        // This list may be mutated later, so we should store the capacity
        .symbols = this.symbols,
        .module_scope = this.module_scope,
        .char_freq = if (this.flags.has_char_freq) this.char_freq else null,
        .exports_ref = this.exports_ref,
        .module_ref = this.module_ref,
        .wrapper_ref = this.wrapper_ref,
        .require_ref = this.require_ref,
        .top_level_await_keyword = this.top_level_await_keyword,

        // These are used when bundling. They are filled in during the parser pass
        // since we already have to traverse the AST then anyway and the parser pass
        // is conveniently fully parallelized.
        .named_imports = this.named_imports,
        .named_exports = this.named_exports,
        .export_star_import_records = this.export_star_import_records,

        .top_level_symbols_to_parts = this.top_level_symbols_to_parts,

        .commonjs_named_exports = this.commonjs_named_exports,

        .redirect_import_record_index = this.redirect_import_record_index,

        .target = this.target,

        // .const_values = this.const_values,
        .ts_enums = this.ts_enums,

        .uses_exports_ref = this.flags.uses_exports_ref,
        .uses_module_ref = this.flags.uses_module_ref,
        // .uses_require_ref = ast.uses_require_ref,
        .export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .{} },
        .force_cjs_to_esm = this.flags.force_cjs_to_esm,
        .has_lazy_export = this.flags.has_lazy_export,
        .commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,
        .directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null,
    };
}

pub fn init(ast: Ast) BundledAst {
    return .{
        .approximate_newline_count = @as(u32, @truncate(ast.approximate_newline_count)),
        .nested_scope_slot_counts = ast.nested_scope_slot_counts,

        .exports_kind = ast.exports_kind,

        .import_records = ast.import_records,

        .hashbang = ast.hashbang,
        .parts = ast.parts,
        // This list may be mutated later, so we should store the capacity
        .symbols = ast.symbols,
        .module_scope = ast.module_scope,
        .char_freq = ast.char_freq orelse undefined,
        .exports_ref = ast.exports_ref,
        .module_ref = ast.module_ref,
        .wrapper_ref = ast.wrapper_ref,
        .require_ref = ast.require_ref,
        .top_level_await_keyword = ast.top_level_await_keyword,
        // These are used when bundling. They are filled in during the parser pass
        // since we already have to traverse the AST then anyway and the parser pass
        // is conveniently fully parallelized.
        .named_imports = ast.named_imports,
        .named_exports = ast.named_exports,
        .export_star_import_records = ast.export_star_import_records,

        // .allocator = ast.allocator,
        .top_level_symbols_to_parts = ast.top_level_symbols_to_parts,

        .commonjs_named_exports = ast.commonjs_named_exports,

        .redirect_import_record_index = ast.redirect_import_record_index orelse std.math.maxInt(u32),

        .target = ast.target,

        // .const_values = ast.const_values,
        .ts_enums = ast.ts_enums,

        .flags = .{
            .uses_exports_ref = ast.uses_exports_ref,
            .uses_module_ref = ast.uses_module_ref,
            // .uses_require_ref = ast.uses_require_ref,
            .uses_export_keyword = ast.export_keyword.len > 0,
            .has_char_freq = ast.char_freq != null,
            .force_cjs_to_esm = ast.force_cjs_to_esm,
            .has_lazy_export = ast.has_lazy_export,
            .commonjs_module_exports_assigned_deoptimized = ast.commonjs_module_exports_assigned_deoptimized,
            .has_explicit_use_strict_directive = strings.eqlComptime(ast.directive orelse "", "use strict"),
        },
    };
}

/// TODO: Move this from being done on all parse tasks into the start of the linker. This currently allocates base64 encoding for every small file loaded thing.
pub fn addUrlForCss(
    this: *BundledAst,
    allocator: std.mem.Allocator,
    source: *const logger.Source,
    mime_type_: ?[]const u8,
    unique_key: ?[]const u8,
) void {
    {
        const mime_type = if (mime_type_) |m| m else MimeType.byExtension(bun.strings.trimLeadingChar(std.fs.path.extension(source.path.text), '.')).value;
        const contents = source.contents;
        // TODO: make this configurable
        const COPY_THRESHOLD = 128 * 1024; // 128kb
        const should_copy = contents.len >= COPY_THRESHOLD and unique_key != null;
        if (should_copy) return;
        this.url_for_css = url_for_css: {
            // Encode as base64
            const encode_len = bun.base64.encodeLen(contents);
            const data_url_prefix_len = "data:".len + mime_type.len + ";base64,".len;
            const total_buffer_len = data_url_prefix_len + encode_len;
            var encoded = allocator.alloc(u8, total_buffer_len) catch bun.outOfMemory();
            _ = std.fmt.bufPrint(encoded[0..data_url_prefix_len], "data:{s};base64,", .{mime_type}) catch unreachable;
            const len = bun.base64.encode(encoded[data_url_prefix_len..], contents);
            break :url_for_css encoded[0 .. data_url_prefix_len + len];
        };
    }
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const MimeType = bun.http.MimeType;

const js_ast = bun.js_ast;
const BundledAst = js_ast.BundledAst;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Part = js_ast.Part;
const Ref = js_ast.Ref;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
const TlaCheck = js_ast.TlaCheck;

const Ast = js_ast.Ast;
pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
pub const ConstValuesMap = Ast.ConstValuesMap;
pub const NamedExports = Ast.NamedExports;
pub const NamedImports = Ast.NamedImports;
pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;
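addUrlForCss inlines small CSS-referenced assets as data: URLs: the prefix "data:<mime>;base64," followed by the base64-encoded contents, skipping anything at or above the 128 KB copy threshold. The equivalent construction in a few lines of TypeScript (a sketch, not the bundler's actual code path):

// Build the same `data:<mime>;base64,<payload>` URL the Zig code assembles.
function dataUrlFor(contents: Uint8Array, mimeType: string): string {
  const payload = Buffer.from(contents).toString("base64");
  return `data:${mimeType};base64,${payload}`;
}

const css = new TextEncoder().encode("body { color: red }");
console.log(dataUrlFor(css, "text/css"));
// data:text/css;base64,Ym9keSB7IGNvbG9yOiByZWQgfQ==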
139
src/ast/CharFreq.zig
Normal file
@@ -0,0 +1,139 @@
pub const char_freq_count = 64;
pub const CharAndCount = struct {
    char: u8 = 0,
    count: i32 = 0,
    index: usize = 0,

    pub const Array = [char_freq_count]CharAndCount;

    pub fn lessThan(_: void, a: CharAndCount, b: CharAndCount) bool {
        if (a.count != b.count) {
            return a.count > b.count;
        }

        if (a.index != b.index) {
            return a.index < b.index;
        }

        return a.char < b.char;
    }
};

const Vector = @Vector(char_freq_count, i32);
const Buffer = [char_freq_count]i32;

freqs: Buffer align(1) = undefined,

const scan_big_chunk_size = 32;
pub fn scan(this: *CharFreq, text: string, delta: i32) void {
    if (delta == 0)
        return;

    if (text.len < scan_big_chunk_size) {
        scanSmall(&this.freqs, text, delta);
    } else {
        scanBig(&this.freqs, text, delta);
    }
}

fn scanBig(out: *align(1) Buffer, text: string, delta: i32) void {
    // https://zig.godbolt.org/z/P5dPojWGK
    var freqs = out.*;
    defer out.* = freqs;
    var deltas: [256]i32 = [_]i32{0} ** 256;
    var remain = text;

    bun.assert(remain.len >= scan_big_chunk_size);

    const unrolled = remain.len - (remain.len % scan_big_chunk_size);
    const remain_end = remain.ptr + unrolled;
    var unrolled_ptr = remain.ptr;
    remain = remain[unrolled..];

    while (unrolled_ptr != remain_end) : (unrolled_ptr += scan_big_chunk_size) {
        const chunk = unrolled_ptr[0..scan_big_chunk_size].*;
        inline for (0..scan_big_chunk_size) |i| {
            deltas[@as(usize, chunk[i])] += delta;
        }
    }

    for (remain) |c| {
        deltas[@as(usize, c)] += delta;
    }

    freqs[0..26].* = deltas['a' .. 'a' + 26].*;
    freqs[26 .. 26 * 2].* = deltas['A' .. 'A' + 26].*;
    freqs[26 * 2 .. 62].* = deltas['0' .. '0' + 10].*;
    freqs[62] = deltas['_'];
    freqs[63] = deltas['$'];
}

fn scanSmall(out: *align(1) Buffer, text: string, delta: i32) void {
    var freqs: [char_freq_count]i32 = out.*;
    defer out.* = freqs;

    for (text) |c| {
        const i: usize = switch (c) {
            'a'...'z' => @as(usize, @intCast(c)) - 'a',
            'A'...'Z' => @as(usize, @intCast(c)) - ('A' - 26),
            '0'...'9' => @as(usize, @intCast(c)) + (53 - '0'),
            '_' => 62,
            '$' => 63,
            else => continue,
        };
        freqs[i] += delta;
    }
}

pub fn include(this: *CharFreq, other: CharFreq) void {
    // https://zig.godbolt.org/z/Mq8eK6K9s
    const left: @Vector(char_freq_count, i32) = this.freqs;
    const right: @Vector(char_freq_count, i32) = other.freqs;

    this.freqs = left + right;
}

pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier {
    const array: CharAndCount.Array = brk: {
        var _array: CharAndCount.Array = undefined;

        for (&_array, NameMinifier.default_tail, this.freqs, 0..) |*dest, char, freq, i| {
            dest.* = CharAndCount{
                .char = char,
                .index = i,
                .count = freq,
            };
        }

        std.sort.pdq(CharAndCount, &_array, {}, CharAndCount.lessThan);

        break :brk _array;
    };

    var minifier = NameMinifier.init(allocator);
    minifier.head.ensureTotalCapacityPrecise(NameMinifier.default_head.len) catch unreachable;
    minifier.tail.ensureTotalCapacityPrecise(NameMinifier.default_tail.len) catch unreachable;
    // TODO: investigate counting number of < 0 and > 0 and pre-allocating
    for (array) |item| {
        if (item.char < '0' or item.char > '9') {
            minifier.head.append(item.char) catch unreachable;
        }
        minifier.tail.append(item.char) catch unreachable;
    }

    return minifier;
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const string = bun.string;

const js_ast = bun.js_ast;
const CharFreq = js_ast.CharFreq;
const NameMinifier = js_ast.NameMinifier;

const G = js_ast.G;
pub const Class = G.Class;
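compile() turns the 64-slot frequency table into the minifier's alphabet: sort [a-zA-Z0-9_$] by descending count so the characters most common in the original source get the shortest minified names, with digits excluded from the leading position. A compact TypeScript sketch of that ordering step (hypothetical, for illustration):

// Order the identifier alphabet by observed frequency, most frequent first;
// ties fall back to the original alphabet position, like CharAndCount.lessThan.
const alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$";

function minifierOrder(freqs: Int32Array): { head: string; tail: string } {
  const ranked = [...alphabet]
    .map((char, index) => ({ char, index, count: freqs[index] ?? 0 }))
    .sort((a, b) => b.count - a.count || a.index - b.index);
  const tail = ranked.map(r => r.char).join("");
  // The first character of a name must not be a digit.
  const head = ranked.filter(r => r.char < "0" || r.char > "9").map(r => r.char).join("");
  return { head, tail };
}

const freqs = new Int32Array(64);
for (const c of "const foo = bar;") {
  const i = alphabet.indexOf(c);
  if (i !== -1) freqs[i] += 1;
}
console.log(minifierOrder(freqs).head.slice(0, 8)); // most frequent identifier chars first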
1441
src/ast/E.zig
Normal file
File diff suppressed because it is too large
3231
src/ast/Expr.zig
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.