mirror of
https://github.com/oven-sh/bun
synced 2026-02-06 00:48:55 +00:00
Compare commits
90 Commits
claude/fix
...
claude/nod
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
715413adc0 | ||
|
|
806d6c156f | ||
|
|
198d7c3b19 | ||
|
|
dfe1a1848a | ||
|
|
0612f459a4 | ||
|
|
408fda7ad2 | ||
|
|
7ad3049e70 | ||
|
|
ed6f099e5e | ||
|
|
258a2a2e3a | ||
|
|
4568258960 | ||
|
|
dd27ad7716 | ||
|
|
d3d08eeb2d | ||
|
|
47727bdbe3 | ||
|
|
be5c69df79 | ||
|
|
9785e37e10 | ||
|
|
4494353abf | ||
|
|
fa1ad54257 | ||
|
|
a0687c06f8 | ||
|
|
15578df7fc | ||
|
|
b6d3768038 | ||
|
|
1ac2391b20 | ||
|
|
276eee74eb | ||
|
|
deaef1882b | ||
|
|
711de8a667 | ||
|
|
a5af485354 | ||
|
|
73e737be56 | ||
|
|
68d322f05f | ||
|
|
39eccf89a8 | ||
|
|
a729a046bd | ||
|
|
9bb4a6af19 | ||
|
|
07ffde8a69 | ||
|
|
bb67f2b345 | ||
|
|
7c4c360431 | ||
|
|
4b39a9b07d | ||
|
|
d0edcc69ae | ||
|
|
0cf2b71ff1 | ||
|
|
40bff9fea8 | ||
|
|
7726e5c670 | ||
|
|
7a31108019 | ||
|
|
dd68364630 | ||
|
|
7d4f6efe7a | ||
|
|
7cdcd34f58 | ||
|
|
2a6d018d73 | ||
|
|
8efe7945eb | ||
|
|
5bdcf339d7 | ||
|
|
03afe6ef28 | ||
|
|
ce5152dd7a | ||
|
|
5c65c18e72 | ||
|
|
100ab8c503 | ||
|
|
a51af710c0 | ||
|
|
5ca1580427 | ||
|
|
b34bab745b | ||
|
|
6034c2f94b | ||
|
|
2b5a59cae1 | ||
|
|
3bcf93ddd6 | ||
|
|
53b24ace79 | ||
|
|
a1f44caa87 | ||
|
|
3de884f2c9 | ||
|
|
a6162295c5 | ||
|
|
80c46b1607 | ||
|
|
26cbcd21c1 | ||
|
|
3d6dda6901 | ||
|
|
93f92658b3 | ||
|
|
f8c2dac836 | ||
|
|
4bbe32fff8 | ||
|
|
60c735a11d | ||
|
|
003d13ec27 | ||
|
|
245abb92fb | ||
|
|
066a25ac40 | ||
|
|
ab88317846 | ||
|
|
e7373bbf32 | ||
|
|
4687cc4f5e | ||
|
|
a5ff729665 | ||
|
|
62e8a7fb01 | ||
|
|
220807f3dc | ||
|
|
562f82d3f8 | ||
|
|
4580e11fc3 | ||
|
|
2956281845 | ||
|
|
9a2dfee3ca | ||
|
|
7a47c945aa | ||
|
|
24b7835ecd | ||
|
|
95990e7bd6 | ||
|
|
f2e487b1e6 | ||
|
|
3315ade0e9 | ||
|
|
95e653e52b | ||
|
|
a8522b16af | ||
|
|
aba8c4efd2 | ||
|
|
0bebdc9049 | ||
|
|
1058d0dee4 | ||
|
|
679a07caef |
@@ -1,78 +0,0 @@
|
||||
// agent.mjs — run the `claude` CLI with a named prompt from the .agent/ directory.
//
// Usage: node agent.mjs <prompt_name> [extra_args...]
//
// The prompt name is upper-cased and resolved to `.agent/<NAME>.md`. Any
// GITHUB_* environment variables are prepended to the prompt as context, and
// extra positional arguments are appended, before the combined prompt is
// handed to `claude`. The script's exit code mirrors the child's exit code.
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

// Lookup is case-insensitive for the user: the name is upper-cased, so prompt
// files must be stored upper-cased (e.g. `.agent/TRIAGE.md`).
const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  // BUGFIX: previously suggested lowercase filenames (.agent/triage.md) even
  // though the lookup upper-cases the name — on case-sensitive filesystems
  // files named that way would never be found.
  console.error(`Available prompts should be named like: .agent/TRIAGE.md, .agent/DEBUG.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  // Prepend GitHub Actions context (sorted for deterministic ordering) so the
  // agent knows which repo/run/PR it is operating on.
  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  // Append any extra CLI arguments as additional instructions for the agent.
  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    // Non-interactive runs print the result and exit instead of opening a session.
    claudeArgs.unshift("--print");
  }

  const { status, signal, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  if (error) {
    // spawnSync itself failed (e.g. `claude` not on PATH).
    console.error("Error running claude:", error);
    process.exit(1);
  }

  // BUGFIX: when the child is killed by a signal, `status` is null and the
  // old `status || 0` silently reported success. Treat signal death as failure.
  if (signal) {
    console.error(`Error: claude was terminated by signal ${signal}`);
    process.exit(1);
  }

  process.exit(status ?? 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
|
||||
47
.github/pull_request_template.md
vendored
47
.github/pull_request_template.md
vendored
@@ -1,50 +1,3 @@
|
||||
### What does this PR do?
|
||||
|
||||
<!-- **Please explain what your changes do**, example: -->
|
||||
|
||||
<!--
|
||||
|
||||
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
|
||||
|
||||
-->
|
||||
|
||||
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
|
||||
- [ ] Code changes
|
||||
|
||||
### How did you verify your code works?
|
||||
|
||||
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
|
||||
|
||||
<!-- I wrote automated tests -->
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I included a test for the new code, or existing tests cover it
|
||||
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
|
||||
- [ ] I added TypeScript types for the new methods, getters, or setters
|
||||
-->
|
||||
|
||||
<!-- If dependencies in tests changed:
|
||||
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
- [ ] I added a test that imports the module
|
||||
- [ ] I added a test that require() the module
|
||||
-->
|
||||
|
||||
15
.github/workflows/update-hdrhistogram.yml
vendored
15
.github/workflows/update-hdrhistogram.yml
vendored
@@ -55,10 +55,13 @@ jobs:
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
|
||||
# Try to get commit SHA from tag object (for annotated tags)
|
||||
# If it fails, assume it's a lightweight tag pointing directly to commit
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
|
||||
if [ -z "$LATEST_SHA" ]; then
|
||||
# Lightweight tag - SHA points directly to commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
@@ -88,7 +91,7 @@ jobs:
|
||||
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares-${{ github.run_number }}
|
||||
branch: deps/update-hdrhistogram-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
@@ -96,4 +99,4 @@ jobs:
|
||||
|
||||
Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
|
||||
|
||||
33
.github/workflows/update-highway.yml
vendored
33
.github/workflows/update-highway.yml
vendored
@@ -50,14 +50,33 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
|
||||
# Handle both lightweight tags (type: commit) and annotated tags (type: tag)
|
||||
if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
|
||||
# Lightweight tag - object.sha is already the commit SHA
|
||||
LATEST_SHA="$TAG_OBJECT_SHA"
|
||||
elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# Annotated tag - need to fetch the tag object to get the commit SHA
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -88,7 +107,7 @@ jobs:
|
||||
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares-${{ github.run_number }}
|
||||
branch: deps/update-highway-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
@@ -96,4 +115,4 @@ jobs:
|
||||
|
||||
Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
|
||||
|
||||
22
.github/workflows/update-lolhtml.yml
vendored
22
.github/workflows/update-lolhtml.yml
vendored
@@ -50,15 +50,27 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
# Get the commit SHA that the tag points to
|
||||
# This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
|
||||
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
|
||||
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# This is an annotated tag, we need to get the commit it points to
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# This is a lightweight tag pointing directly to a commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
|
||||
27
.github/workflows/update-lshpack.yml
vendored
27
.github/workflows/update-lshpack.yml
vendored
@@ -50,15 +50,32 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
# Get the tag reference, which contains both SHA and type
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
|
||||
# If it's an annotated tag, we need to dereference it to get the commit SHA
|
||||
# If it's a lightweight tag, the SHA already points to the commit
|
||||
if [ "$TAG_TYPE" = "tag" ]; then
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# For lightweight tags, the SHA is already the commit SHA
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -184,4 +184,6 @@ codegen-for-zig-team.tar.gz
|
||||
*.sock
|
||||
scratch*.{js,ts,tsx,cjs,mjs}
|
||||
|
||||
*.bun-build
|
||||
*.bun-build
|
||||
|
||||
scripts/lldb-inline
|
||||
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -168,5 +168,5 @@
|
||||
"WebKit/WebInspectorUI": true,
|
||||
},
|
||||
"git.detectSubmodules": false,
|
||||
// "bun.test.customScript": "./build/debug/bun-debug test"
|
||||
"bun.test.customScript": "./build/debug/bun-debug test"
|
||||
}
|
||||
|
||||
29
CLAUDE.md
29
CLAUDE.md
@@ -6,7 +6,7 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
- **Build debug version**: `bun bd`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- Compilation takes ~5 minutes. Don't timeout, be patient.
|
||||
- **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
@@ -59,8 +59,8 @@ test("my feature", async () => {
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
new Response(proc.stdout).text(),
|
||||
new Response(proc.stderr).text(),
|
||||
proc.stdout.text(),
|
||||
proc.stderr.text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
@@ -69,6 +69,8 @@ test("my feature", async () => {
|
||||
});
|
||||
```
|
||||
|
||||
- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
|
||||
|
||||
## Code Architecture
|
||||
|
||||
### Language Structure
|
||||
@@ -133,7 +135,6 @@ test("my feature", async () => {
|
||||
When implementing JavaScript classes in C++:
|
||||
|
||||
1. Create three classes if there's a public constructor:
|
||||
|
||||
- `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
|
||||
- `class FooPrototype : public JSC::JSNonFinalObject`
|
||||
- `class FooConstructor : public JSC::InternalFunction`
|
||||
@@ -193,7 +194,6 @@ Built-in JavaScript modules use special syntax and are organized as:
|
||||
```
|
||||
|
||||
3. **Debug helpers**:
|
||||
|
||||
- `$debug()` - Like console.log but stripped in release builds
|
||||
- `$assert()` - Assertions stripped in release builds
|
||||
- `if($debug) {}` - Check if debug env var is set
|
||||
@@ -221,15 +221,16 @@ bun ci
|
||||
## Important Development Notes
|
||||
|
||||
1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
|
||||
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
|
||||
3. **Follow existing code style** - check neighboring files for patterns
|
||||
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
|
||||
5. **Use absolute paths** - Always use absolute paths in file operations
|
||||
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
|
||||
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
|
||||
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
|
||||
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
|
||||
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging
|
||||
2. **All changes must be tested** - if you're not testing your changes, you're not done.
|
||||
3. **Get your tests to pass**. If you didn't run the tests, your code does not work.
|
||||
4. **Follow existing code style** - check neighboring files for patterns
|
||||
5. **Create tests in the right folder** in `test/` and the test must end in `.test.ts` or `.test.tsx`
|
||||
6. **Use absolute paths** - Always use absolute paths in file operations
|
||||
7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
|
||||
8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
|
||||
9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
|
||||
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
|
||||
11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
|
||||
|
||||
## Key APIs and Features
|
||||
|
||||
|
||||
@@ -28,10 +28,4 @@ bench("brotli compress stream", async () => {
|
||||
await pipeline(source, compress);
|
||||
});
|
||||
|
||||
bench("brotli decompress stream", async () => {
|
||||
const source = Readable.from([compressed]);
|
||||
const decompress = createBrotliDecompress();
|
||||
await pipeline(source, decompress);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
7
bun.lock
7
bun.lock
@@ -15,7 +15,7 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "^5.7.2",
|
||||
"typescript": "5.9.2",
|
||||
},
|
||||
},
|
||||
"packages/@types/bun": {
|
||||
@@ -32,7 +32,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19",
|
||||
"typescript": "^5.0.2",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/react": "^19",
|
||||
@@ -308,7 +307,7 @@
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
|
||||
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
|
||||
|
||||
@@ -334,6 +333,8 @@
|
||||
|
||||
"@octokit/webhooks/@octokit/webhooks-methods": ["@octokit/webhooks-methods@4.1.0", "", {}, "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ=="],
|
||||
|
||||
"bun-tracestrings/typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
|
||||
"camel-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],
|
||||
|
||||
"change-case/camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
|
||||
|
||||
@@ -102,6 +102,11 @@ else()
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
|
||||
optionx(ENABLE_ZIG_ASAN BOOL "If Zig ASAN support should be enabled" DEFAULT ${ENABLE_ASAN})
|
||||
|
||||
if (NOT ENABLE_ASAN)
|
||||
set(ENABLE_ZIG_ASAN OFF)
|
||||
endif()
|
||||
|
||||
if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN)
|
||||
set(DEFAULT_LTO ON)
|
||||
|
||||
@@ -350,6 +350,7 @@ src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
|
||||
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
|
||||
src/bun.js/bindings/webcore/JSWebSocket.cpp
|
||||
src/bun.js/bindings/webcore/JSWorker.cpp
|
||||
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
|
||||
|
||||
@@ -29,6 +29,7 @@ src/js/builtins/TransformStream.ts
|
||||
src/js/builtins/TransformStreamDefaultController.ts
|
||||
src/js/builtins/TransformStreamInternals.ts
|
||||
src/js/builtins/UtilInspect.ts
|
||||
src/js/builtins/WasmStreaming.ts
|
||||
src/js/builtins/WritableStreamDefaultController.ts
|
||||
src/js/builtins/WritableStreamDefaultWriter.ts
|
||||
src/js/builtins/WritableStreamInternals.ts
|
||||
|
||||
@@ -10,6 +10,7 @@ src/allocators/NullableAllocator.zig
|
||||
src/analytics.zig
|
||||
src/analytics/schema.zig
|
||||
src/api/schema.zig
|
||||
src/asan.zig
|
||||
src/ast.zig
|
||||
src/ast/Ast.zig
|
||||
src/ast/ASTMemoryAllocator.zig
|
||||
@@ -377,7 +378,6 @@ src/collections/baby_list.zig
|
||||
src/collections/bit_set.zig
|
||||
src/collections/hive_array.zig
|
||||
src/collections/multi_array_list.zig
|
||||
src/collections/safety.zig
|
||||
src/compile_target.zig
|
||||
src/comptime_string_map.zig
|
||||
src/copy_file.zig
|
||||
@@ -546,6 +546,7 @@ src/http/AsyncHTTP.zig
|
||||
src/http/CertificateInfo.zig
|
||||
src/http/Decompressor.zig
|
||||
src/http/Encoding.zig
|
||||
src/http/ETag.zig
|
||||
src/http/FetchRedirect.zig
|
||||
src/http/HeaderBuilder.zig
|
||||
src/http/Headers.zig
|
||||
@@ -633,6 +634,7 @@ src/install/resolvers/folder_resolver.zig
|
||||
src/install/versioned_url.zig
|
||||
src/install/windows-shim/BinLinkingShim.zig
|
||||
src/install/windows-shim/bun_shim_impl.zig
|
||||
src/install/yarn.zig
|
||||
src/interchange.zig
|
||||
src/interchange/json.zig
|
||||
src/interchange/toml.zig
|
||||
@@ -703,6 +705,9 @@ src/s3/multipart_options.zig
|
||||
src/s3/multipart.zig
|
||||
src/s3/simple_request.zig
|
||||
src/s3/storage_class.zig
|
||||
src/safety.zig
|
||||
src/safety/alloc_ptr.zig
|
||||
src/safety/CriticalSection.zig
|
||||
src/semver.zig
|
||||
src/semver/ExternalString.zig
|
||||
src/semver/SemverObject.zig
|
||||
@@ -853,6 +858,10 @@ src/string/StringJoiner.zig
|
||||
src/string/WTFStringImpl.zig
|
||||
src/sys_uv.zig
|
||||
src/sys.zig
|
||||
src/sys/coreutils_error_map.zig
|
||||
src/sys/Error.zig
|
||||
src/sys/File.zig
|
||||
src/sys/libuv_error_map.zig
|
||||
src/system_timer.zig
|
||||
src/test/fixtures.zig
|
||||
src/test/recover.zig
|
||||
|
||||
@@ -618,7 +618,7 @@ register_command(
|
||||
-Doptimize=${ZIG_OPTIMIZE}
|
||||
-Dcpu=${ZIG_CPU}
|
||||
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
|
||||
-Dversion=${VERSION}
|
||||
-Dreported_nodejs_version=${NODEJS_VERSION}
|
||||
-Dcanary=${CANARY_REVISION}
|
||||
@@ -1034,7 +1034,6 @@ if(LINUX)
|
||||
--ld-path=${LLD_PROGRAM}
|
||||
-fno-pic
|
||||
-Wl,-no-pie
|
||||
-Wl,-icf=safe
|
||||
-Wl,--as-needed
|
||||
-Wl,-z,stack-size=12800000
|
||||
-Wl,--compress-debug-sections=zlib
|
||||
@@ -1060,6 +1059,13 @@ if(LINUX)
|
||||
-Wl,--gc-sections
|
||||
)
|
||||
endif()
|
||||
|
||||
if (NOT DEBUG AND NOT ENABLE_ASAN)
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,-icf=safe
|
||||
)
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
# --- Symbols list ---
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
HdrHistogram/HdrHistogram_c
|
||||
COMMIT
|
||||
652d51bcc36744fd1a6debfeb1a8a5f58b14022c
|
||||
8dcce8f68512fca460b171bccc3a5afce0048779
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
libarchive/libarchive
|
||||
COMMIT
|
||||
7118f97c26bf0b2f426728b482f86508efc81d02
|
||||
9525f90ca4bd14c7b335e2f8c84a4607b0af6bdf
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/mimalloc
|
||||
COMMIT
|
||||
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
|
||||
178534eeb7c0b4e2f438b513640c6f4d7338416a
|
||||
)
|
||||
|
||||
set(MIMALLOC_CMAKE_ARGS
|
||||
@@ -13,14 +13,47 @@ set(MIMALLOC_CMAKE_ARGS
|
||||
-DMI_BUILD_SHARED=OFF
|
||||
-DMI_BUILD_TESTS=OFF
|
||||
-DMI_USE_CXX=ON
|
||||
-DMI_OVERRIDE=OFF
|
||||
-DMI_OSX_ZONE=OFF
|
||||
-DMI_OSX_INTERPOSE=OFF
|
||||
-DMI_SKIP_COLLECT_ON_EXIT=ON
|
||||
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=1 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 74 MB
|
||||
# ```
|
||||
#
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=1 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=0 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 30 MB
|
||||
# ```
|
||||
-DMI_NO_THP=1
|
||||
)
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_TRACK_ASAN=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
elseif(APPLE OR LINUX)
|
||||
# Enable static override when ASAN is not enabled
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=ON)
|
||||
if(APPLE)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=ON)
|
||||
else()
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(DEBUG)
|
||||
@@ -31,13 +64,7 @@ if(ENABLE_VALGRIND)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
if(DEBUG)
|
||||
set(MIMALLOC_LIBRARY mimalloc-static-debug)
|
||||
else()
|
||||
set(MIMALLOC_LIBRARY mimalloc-static)
|
||||
endif()
|
||||
elseif(DEBUG)
|
||||
if(DEBUG)
|
||||
if (ENABLE_ASAN)
|
||||
set(MIMALLOC_LIBRARY mimalloc-asan-debug)
|
||||
else()
|
||||
@@ -53,6 +80,7 @@ if(APPLE OR (LINUX AND NOT DEBUG))
|
||||
set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o)
|
||||
endif()
|
||||
|
||||
|
||||
register_cmake_command(
|
||||
TARGET
|
||||
mimalloc
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 1098cc50652ab1eab171f58f7669e19ca6c276ae)
|
||||
set(WEBKIT_VERSION 642e2252f6298387edb6d2f991a0408fd0320466)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -164,6 +164,70 @@ Static responses do not allocate additional memory after initialization. You can
|
||||
|
||||
Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.
|
||||
|
||||
### File Responses vs Static Responses
|
||||
|
||||
When serving files in routes, there are two distinct behaviors depending on whether you buffer the file content or serve it directly:
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
routes: {
|
||||
// Static route - content is buffered in memory at startup
|
||||
"/logo.png": new Response(await Bun.file("./logo.png").bytes()),
|
||||
|
||||
// File route - content is read from filesystem on each request
|
||||
"/download.zip": new Response(Bun.file("./download.zip")),
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Static routes** (`new Response(await file.bytes())`) buffer content in memory at startup:
|
||||
|
||||
- **Zero filesystem I/O** during requests - content served entirely from memory
|
||||
- **ETag support** - Automatically generates and validates ETags for caching
|
||||
- **If-None-Match** - Returns `304 Not Modified` when client ETag matches
|
||||
- **No 404 handling** - Missing files cause startup errors, not runtime 404s
|
||||
- **Memory usage** - Full file content stored in RAM
|
||||
- **Best for**: Small static assets, API responses, frequently accessed files
|
||||
|
||||
**File routes** (`new Response(Bun.file(path))`) read from filesystem per request:
|
||||
|
||||
- **Filesystem reads** on each request - checks file existence and reads content
|
||||
- **Built-in 404 handling** - Returns `404 Not Found` if file doesn't exist or becomes inaccessible
|
||||
- **Last-Modified support** - Uses file modification time for `If-Modified-Since` headers
|
||||
- **If-Modified-Since** - Returns `304 Not Modified` when file hasn't changed since client's cached version
|
||||
- **Range request support** - Automatically handles partial content requests with `Content-Range` headers
|
||||
- **Streaming transfers** - Uses buffered reader with backpressure handling for efficient memory usage
|
||||
- **Memory efficient** - Only buffers small chunks during transfer, not entire file
|
||||
- **Best for**: Large files, dynamic content, user uploads, files that change frequently
|
||||
|
||||
### HTTP Caching Behavior
|
||||
|
||||
Both route types implement HTTP caching standards but with different strategies:
|
||||
|
||||
#### Static Routes Caching
|
||||
|
||||
- **ETag generation**: Automatically computes ETag hash from content at startup
|
||||
- **If-None-Match**: Validates client ETag against server ETag
|
||||
- **304 responses**: Returns `304 Not Modified` with empty body when ETags match
|
||||
- **Cache headers**: Inherits any `Cache-Control` headers you provide in the Response
|
||||
- **Consistency**: ETag remains constant until server restart or route reload
|
||||
|
||||
#### File Routes Caching
|
||||
|
||||
- **Last-Modified**: Uses file's `mtime` for `Last-Modified` header
|
||||
- **If-Modified-Since**: Compares client date with file modification time
|
||||
- **304 responses**: Returns `304 Not Modified` when file unchanged since client's cached version
|
||||
- **Content-Length**: Automatically set based on current file size
|
||||
- **Dynamic validation**: Checks file modification time on each request
|
||||
|
||||
#### Status Code Handling
|
||||
|
||||
Both route types automatically adjust status codes:
|
||||
|
||||
- **200 → 204**: Empty files (0 bytes) return `204 No Content` instead of `200 OK`
|
||||
- **200 → 304**: Successful cache validation returns `304 Not Modified`
|
||||
- **File routes only**: Missing or inaccessible files return `404 Not Found`
|
||||
|
||||
```ts
|
||||
const server = Bun.serve({
|
||||
static: {
|
||||
|
||||
@@ -208,8 +208,8 @@ export class ArrayBufferSink {
|
||||
*
|
||||
* This API might change later to separate Uint8ArraySink and ArrayBufferSink
|
||||
*/
|
||||
flush(): number | Uint8Array | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array;
|
||||
flush(): number | Uint8Array<ArrayBuffer> | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array<ArrayBuffer>;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -158,7 +158,7 @@ See [Test > Lifecycle](https://bun.com/docs/test/lifecycle) for complete documen
|
||||
|
||||
## Mocks
|
||||
|
||||
Create mock functions with the `mock` function. Mocks are automatically reset between tests.
|
||||
Create mock functions with the `mock` function.
|
||||
|
||||
```ts
|
||||
import { test, expect, mock } from "bun:test";
|
||||
|
||||
@@ -14,7 +14,7 @@ if (typeof Bun !== "undefined") {
|
||||
|
||||
---
|
||||
|
||||
In TypeScript environments, the previous approach will result in a type error unless `bun-types` is globally installed. To avoid this, you can check `process.versions` instead.
|
||||
In TypeScript environments, the previous approach will result in a type error unless `@types/bun` is installed. To avoid this, you can check `process.versions` instead.
|
||||
|
||||
```ts
|
||||
if (process.versions.bun) {
|
||||
|
||||
@@ -426,6 +426,54 @@ test("exactly two assertions", () => {
|
||||
|
||||
This helps ensure all your assertions run, especially in complex async code with multiple code paths.
|
||||
|
||||
## Type Testing
|
||||
|
||||
Bun includes `expectTypeOf` for testing typescript types, compatible with Vitest.
|
||||
|
||||
### expectTypeOf
|
||||
|
||||
{% callout %}
|
||||
|
||||
**Note** — These functions are no-ops at runtime - you need to run TypeScript separately to verify the type checks.
|
||||
|
||||
{% endcallout %}
|
||||
|
||||
The `expectTypeOf` function provides type-level assertions that are checked by TypeScript's type checker. **Important**:
|
||||
|
||||
To test your types:
|
||||
|
||||
1. Write your type assertions using `expectTypeOf`
|
||||
2. Run `bunx tsc --noEmit` to check that your types are correct
|
||||
|
||||
```ts
|
||||
import { expectTypeOf } from "bun:test";
|
||||
|
||||
// Basic type assertions
|
||||
expectTypeOf<string>().toEqualTypeOf<string>();
|
||||
expectTypeOf(123).toBeNumber();
|
||||
expectTypeOf("hello").toBeString();
|
||||
|
||||
// Object type matching
|
||||
expectTypeOf({ a: 1, b: "hello" }).toMatchObjectType<{ a: number }>();
|
||||
|
||||
// Function types
|
||||
function greet(name: string): string {
|
||||
return `Hello ${name}`;
|
||||
}
|
||||
|
||||
expectTypeOf(greet).toBeFunction();
|
||||
expectTypeOf(greet).parameters.toEqualTypeOf<[string]>();
|
||||
expectTypeOf(greet).returns.toEqualTypeOf<string>();
|
||||
|
||||
// Array types
|
||||
expectTypeOf([1, 2, 3]).items.toBeNumber();
|
||||
|
||||
// Promise types
|
||||
expectTypeOf(Promise.resolve(42)).resolves.toBeNumber();
|
||||
```
|
||||
|
||||
For full documentation on expectTypeOf matchers, see the [API Reference](/reference/bun/test/expectTypeOf)
|
||||
|
||||
## Matchers
|
||||
|
||||
Bun implements the following matchers. Full Jest compatibility is on the roadmap; track progress [here](https://github.com/oven-sh/bun/issues/1825).
|
||||
@@ -629,17 +677,17 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveReturnedWith()`](https://jestjs.io/docs/expect#tohavereturnedwithvalue)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveLastReturnedWith()`](https://jestjs.io/docs/expect#tohavelastreturnedwithvalue)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveNthReturnedWith()`](https://jestjs.io/docs/expect#tohaventhreturnedwithnthcall-value)
|
||||
|
||||
---
|
||||
|
||||
1
examples/.gitignore
vendored
1
examples/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
bun-examples-all
|
||||
@@ -1,7 +0,0 @@
|
||||
#[no_mangle]
|
||||
pub extern "C" fn add(a: i32, b: i32) -> i32 {
|
||||
a + b
|
||||
}
|
||||
|
||||
// to compile:
|
||||
// rustc --crate-type cdylib add.rs
|
||||
@@ -1,12 +0,0 @@
|
||||
import { dlopen, suffix } from "bun:ffi";
|
||||
|
||||
const {
|
||||
symbols: { add },
|
||||
} = dlopen(`./libadd.${suffix}`, {
|
||||
add: {
|
||||
args: ["i32", "i32"],
|
||||
returns: "i32",
|
||||
},
|
||||
});
|
||||
|
||||
console.log(add(1, 2));
|
||||
@@ -1,6 +0,0 @@
|
||||
pub export fn add(a: i32, b: i32) i32 {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
// to compile:
|
||||
// zig build-lib -OReleaseFast ./add.zig -dynamic --name add
|
||||
@@ -1,89 +0,0 @@
|
||||
// To run this example:
|
||||
//
|
||||
// bun --hot bun-hot-websockets.js
|
||||
//
|
||||
|
||||
const css = ([inner]) => {
|
||||
return inner;
|
||||
};
|
||||
|
||||
const styles = css`
|
||||
#bun {
|
||||
margin: 0 auto;
|
||||
margin-top: 200px;
|
||||
object-fit: cover;
|
||||
}
|
||||
html,
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
body {
|
||||
background: #f1239f;
|
||||
font-family: "Inter", sans-serif;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
align-content: center;
|
||||
color: white;
|
||||
}
|
||||
h1 {
|
||||
padding: 0;
|
||||
text-align: center;
|
||||
font-size: 3rem;
|
||||
-webkit-text-stroke: 2px black;
|
||||
}
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
`;
|
||||
|
||||
Bun.serve({
|
||||
websocket: {
|
||||
message(ws, msg) {
|
||||
ws.send(styles);
|
||||
},
|
||||
},
|
||||
fetch(req, server) {
|
||||
if (req.url.endsWith("/hot")) {
|
||||
if (server.upgrade(req))
|
||||
return new Response("", {
|
||||
status: 101,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(
|
||||
`
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>WebSockets</title>
|
||||
</head>
|
||||
<body>
|
||||
<style></style>
|
||||
<script>
|
||||
const ws = new WebSocket("ws://localhost:3000/hot");
|
||||
const style = document.querySelector("style");
|
||||
ws.onmessage = (e) => {
|
||||
style.innerHTML = e.data;
|
||||
};
|
||||
setInterval(() => {
|
||||
ws.send("ping");
|
||||
}, 8);
|
||||
</script>
|
||||
<div id="app">
|
||||
<img src="https://bun.com/logo.svg" alt="Bun" id='bun' />
|
||||
<h1>bun --hot websockets</h1>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
`,
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "text/html; charset=utf-8",
|
||||
},
|
||||
},
|
||||
);
|
||||
},
|
||||
});
|
||||
@@ -1,9 +0,0 @@
|
||||
import { resolve } from "path";
|
||||
const { write, stdout, file } = Bun;
|
||||
import { argv } from "process";
|
||||
|
||||
const path = resolve(argv.at(-1)!);
|
||||
await write(stdout, file(path));
|
||||
|
||||
Bun.stdout;
|
||||
process.stdout;
|
||||
@@ -1,11 +0,0 @@
|
||||
const sequence = [1, 2, 3];
|
||||
sequence.toReversed(); // => [3, 2, 1]
|
||||
sequence; // => [1, 2, 3]
|
||||
|
||||
const outOfOrder = new Uint8Array([3, 1, 2]);
|
||||
outOfOrder.toSorted(); // => Uint8Array [1, 2, 3]
|
||||
outOfOrder; // => Uint8Array [3, 1, 2]
|
||||
|
||||
const correctionNeeded = [1, 1, 3];
|
||||
correctionNeeded.with(1, 2); // => [1, 2, 3]
|
||||
correctionNeeded; // => [1, 1, 3]
|
||||
@@ -1,23 +0,0 @@
|
||||
// Accepts a string, TypedArray, or Blob (file blob support is not implemented but planned)
|
||||
const input = "hello world".repeat(400);
|
||||
|
||||
// Bun.hash() defaults to Wyhash because it's fast
|
||||
console.log(Bun.hash(input));
|
||||
|
||||
console.log(Bun.hash.wyhash(input));
|
||||
// and returns a bigint
|
||||
// all of these hashing functions return number if 32-bit or bigint if 64-bit, not typed arrays.
|
||||
console.log(Bun.hash.adler32(input)); // number
|
||||
console.log(Bun.hash.crc32(input)); // number
|
||||
console.log(Bun.hash.cityHash32(input)); // number
|
||||
console.log(Bun.hash.cityHash64(input)); // bigint
|
||||
console.log(Bun.hash.xxHash32(input)); // number
|
||||
console.log(Bun.hash.xxHash64(input)); // bigint
|
||||
console.log(Bun.hash.xxHash3(input)); // bigint
|
||||
console.log(Bun.hash.murmur32v3(input)); // number
|
||||
console.log(Bun.hash.murmur32v2(input)); // number
|
||||
console.log(Bun.hash.murmur64v2(input)); // bigint
|
||||
console.log(Bun.hash.rapidhash(input)); // bigint
|
||||
|
||||
// Second argument accepts a seed where relevant
|
||||
console.log(Bun.hash(input, 12345));
|
||||
@@ -1,37 +0,0 @@
|
||||
// Start a fast HTTP server from a function
|
||||
|
||||
Bun.serve({
|
||||
async fetch(req) {
|
||||
const { pathname } = new URL(req.url);
|
||||
if (!(pathname.startsWith("/https://") || pathname.startsWith("/http://"))) {
|
||||
return new Response("Enter a path that starts with https:// or http://\n", {
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const response = await fetch(req.url.substring("http://localhost:3000/".length), req.clone());
|
||||
|
||||
return new HTMLRewriter()
|
||||
.on("a[href]", {
|
||||
element(element) {
|
||||
element.setAttribute("href", "https://www.youtube.com/watch?v=dQw4w9WgXcQ");
|
||||
},
|
||||
})
|
||||
.transform(response);
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
@@ -1,76 +0,0 @@
|
||||
import { file, serve } from "bun";
|
||||
import { existsSync, statSync } from "fs";
|
||||
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
let pathname = new URL(req.url).pathname.substring(1);
|
||||
if (pathname == "") {
|
||||
pathname = import.meta.url.replace("file://", "");
|
||||
}
|
||||
|
||||
if (!existsSync(pathname)) {
|
||||
return new Response(null, { status: 404 });
|
||||
}
|
||||
|
||||
const stats = statSync(pathname);
|
||||
|
||||
// https://github.com/gornostay25/svelte-adapter-bun/blob/master/src/sirv.js
|
||||
const headers = new Headers({
|
||||
"Content-Length": "" + stats.size,
|
||||
"Last-Modified": stats.mtime.toUTCString(),
|
||||
ETag: `W/"${stats.size}-${stats.mtime.getTime()}"`,
|
||||
});
|
||||
|
||||
if (req.headers.get("if-none-match") === headers.get("ETag")) {
|
||||
return new Response(null, { status: 304 });
|
||||
}
|
||||
|
||||
const opts = { code: 200, start: 0, end: Infinity, range: false };
|
||||
|
||||
if (req.headers.has("range")) {
|
||||
opts.code = 206;
|
||||
let [x, y] = req.headers.get("range")!.replace("bytes=", "").split("-");
|
||||
let end = (opts.end = parseInt(y, 10) || stats.size - 1);
|
||||
let start = (opts.start = parseInt(x, 10) || 0);
|
||||
|
||||
if (start >= stats.size || end >= stats.size) {
|
||||
headers.set("Content-Range", `bytes */${stats.size}`);
|
||||
return new Response(null, {
|
||||
headers: headers,
|
||||
status: 416,
|
||||
});
|
||||
}
|
||||
|
||||
headers.set("Content-Range", `bytes ${start}-${end}/${stats.size}`);
|
||||
headers.set("Content-Length", "" + (end - start + 1));
|
||||
headers.set("Accept-Ranges", "bytes");
|
||||
opts.range = true;
|
||||
}
|
||||
|
||||
if (opts.range) {
|
||||
return new Response(file(pathname).slice(opts.start, opts.end), {
|
||||
headers,
|
||||
status: opts.code,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(file(pathname), { headers, status: opts.code });
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
@@ -1,31 +0,0 @@
|
||||
import { file, serve } from "bun";
|
||||
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
const pathname = new URL(req.url).pathname.substring(1);
|
||||
|
||||
// If the URL is empty, display this file.
|
||||
if (pathname === "") {
|
||||
return new Response(file(import.meta.url.replace("file://", "")));
|
||||
}
|
||||
|
||||
return new Response(file(pathname));
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
@@ -1,17 +0,0 @@
|
||||
import { serve } from "bun";
|
||||
|
||||
serve({
|
||||
async fetch(req) {
|
||||
// body is a ReadableStream
|
||||
const body = req.body;
|
||||
|
||||
const writer = Bun.file(`upload.${Date.now()}.txt`).writer();
|
||||
for await (const chunk of body!) {
|
||||
writer.write(chunk);
|
||||
}
|
||||
const wrote = await writer.end();
|
||||
|
||||
// @ts-ignore
|
||||
return Response.json({ wrote, type: req.headers.get("Content-Type") });
|
||||
},
|
||||
});
|
||||
@@ -1,12 +0,0 @@
|
||||
import { serve } from "bun";
|
||||
|
||||
const server = serve({
|
||||
fetch(req) {
|
||||
return new Response(`Pending requests count: ${this.pendingRequests}`);
|
||||
},
|
||||
});
|
||||
|
||||
// Stop the server after 5 seconds
|
||||
setTimeout(() => {
|
||||
server.stop();
|
||||
}, 5000);
|
||||
@@ -1,34 +0,0 @@
|
||||
// Start a fast HTTP server from a function
|
||||
Bun.serve({
|
||||
fetch(req: Request) {
|
||||
return new Response(`Echo: ${req.url}`);
|
||||
},
|
||||
|
||||
// baseURI: "http://localhost:3000",
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(\n" + err.toString(), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables bun's default error handler
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
// Start a fast HTTP server from the main file's export
|
||||
// export default {
|
||||
// fetch(req) {
|
||||
// return new Response(
|
||||
// `This is another way to start a server!
|
||||
// if the main file export default's an object
|
||||
// with 'fetch'. Bun automatically calls Bun.serve`
|
||||
// );
|
||||
// },
|
||||
// // so autocomplete & type checking works
|
||||
// } as Bun.Serve;
|
||||
@@ -1,193 +0,0 @@
|
||||
const { AWS_LAMBDA_RUNTIME_API, LAMBDA_TASK_ROOT, _HANDLER } = process.env;
|
||||
|
||||
if (!AWS_LAMBDA_RUNTIME_API || AWS_LAMBDA_RUNTIME_API === "") {
|
||||
throw new Error("AWS_LAMBDA_RUNTIME_API is not set");
|
||||
}
|
||||
|
||||
const nextURL = `http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/next`;
|
||||
const sourceDir = LAMBDA_TASK_ROOT;
|
||||
if (!sourceDir) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
if (!_HANDLER) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
|
||||
// don't care if this fails
|
||||
if (process.cwd() !== sourceDir) {
|
||||
try {
|
||||
process.chdir(sourceDir);
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
var handlerDot = _HANDLER.lastIndexOf(".");
|
||||
var sourcefile = handlerDot > 0 ? _HANDLER.substring(0, handlerDot) : _HANDLER;
|
||||
if (sourcefile.length === 0) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
if (!sourcefile.startsWith("/")) {
|
||||
sourcefile = `./${sourcefile}`;
|
||||
}
|
||||
function noop() {}
|
||||
const method = (handlerDot > 0 ? _HANDLER.substring(handlerDot) : "") || "GET";
|
||||
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.time(`Loaded ${sourcefile}`);
|
||||
}
|
||||
var Handler;
|
||||
|
||||
try {
|
||||
Handler = await import(sourcefile);
|
||||
} catch (e: any) {
|
||||
console.error("Error loading sourcefile:", e);
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.timeEnd(`Loaded ${sourcefile}`);
|
||||
}
|
||||
|
||||
const handlerFunction = Handler.default?.fetch;
|
||||
if (typeof handlerFunction !== "function") {
|
||||
const e = new Error(`${sourcefile} must export default a function called fetch
|
||||
|
||||
Here is an example:
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
return new Response("Hello World");
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
console.error(e);
|
||||
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var baseURLString = AWS_LAMBDA_RUNTIME_API;
|
||||
if ("baseURI" in Handler.default) {
|
||||
baseURLString = Handler.default.baseURI?.toString();
|
||||
}
|
||||
|
||||
var baseURL;
|
||||
try {
|
||||
baseURL = new URL(baseURLString);
|
||||
} catch (e: any) {
|
||||
console.error("Error parsing baseURI:", e);
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") || [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
async function runHandler(response: Response) {
|
||||
const traceID = response.headers.get("Lambda-Runtime-Trace-Id");
|
||||
const requestID = response.headers.get("Lambda-Runtime-Aws-Request-Id");
|
||||
var request = new Request(baseURL.href, {
|
||||
method,
|
||||
headers: response.headers,
|
||||
body: parseInt(response.headers.get("Content-Length") || "0", 10) > 0 ? await response.blob() : undefined,
|
||||
});
|
||||
// we are done with the Response object here
|
||||
// allow it to be GC'd
|
||||
(response as any) = undefined;
|
||||
|
||||
var result: Response;
|
||||
try {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.time(`[${traceID}] Run ${request.url}`);
|
||||
}
|
||||
result = handlerFunction(request, {});
|
||||
if (result && (result as any).then) {
|
||||
await result;
|
||||
}
|
||||
} catch (e1: any) {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.error(`[${traceID}] Error running handler:`, e1);
|
||||
}
|
||||
fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/error`, {
|
||||
method: "POST",
|
||||
|
||||
body: JSON.stringify({
|
||||
errorMessage: e1.message,
|
||||
errorType: e1.name,
|
||||
stackTrace: e1?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
}).finally(noop);
|
||||
return;
|
||||
} finally {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.timeEnd(`[${traceID}] Run ${request.url}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!result || !("headers" in result)) {
|
||||
await fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/error`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
errorMessage: "Expected Response object",
|
||||
errorType: "ExpectedResponseObject",
|
||||
stackTrace: [],
|
||||
}),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/response`, {
|
||||
method: "POST",
|
||||
headers: result.headers,
|
||||
body: await result.blob(),
|
||||
});
|
||||
(result as any) = undefined;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
fetch(nextURL).then(runHandler, console.error);
|
||||
}
|
||||
|
||||
export {};
|
||||
@@ -1,48 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "macros",
|
||||
"dependencies": {
|
||||
"moment": "^2.29.1",
|
||||
"papaparse": "^5.3.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-refresh": "^0.10.0",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^17.0.24",
|
||||
"@types/react-dom": "^17.0.9",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@types/prop-types": ["@types/prop-types@15.7.5", "", {}, "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w=="],
|
||||
|
||||
"@types/react": ["@types/react@17.0.53", "", { "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", "csstype": "^3.0.2" } }, "sha512-1yIpQR2zdYu1Z/dc1OxC+MA6GR240u3gcnP4l6mvj/PJiVaqHsQPmWttsvHsfnhfPbU2FuGmo0wSITPygjBmsw=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@17.0.19", "", { "dependencies": { "@types/react": "^17" } }, "sha512-PiYG40pnQRdPHnlf7tZnp0aQ6q9tspYr72vD61saO6zFCybLfMqwUCN0va1/P+86DXn18ZWeW30Bk7xlC5eEAQ=="],
|
||||
|
||||
"@types/scheduler": ["@types/scheduler@0.16.2", "", {}, "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="],
|
||||
|
||||
"csstype": ["csstype@3.1.1", "", {}, "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"moment": ["moment@2.29.4", "", {}, "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w=="],
|
||||
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
|
||||
"papaparse": ["papaparse@5.3.2", "", {}, "sha512-6dNZu0Ki+gyV0eBsFKJhYr+MdQYAzFUGlBMNj3GNrmHxmz1lfRa24CjFObPXtjcetlOv5Ad299MhIK0znp3afw=="],
|
||||
|
||||
"react": ["react@17.0.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA=="],
|
||||
|
||||
"react-dom": ["react-dom@17.0.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "scheduler": "^0.20.2" }, "peerDependencies": { "react": "17.0.2" } }, "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA=="],
|
||||
|
||||
"react-refresh": ["react-refresh@0.10.0", "", {}, "sha512-PgidR3wST3dDYKr6b4pJoqQFpPGNKDSCDx4cZoshjXipw3LzO7mG1My2pwEzz2JVkF+inx3xRpDeQLFQGH/hsQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.20.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ=="],
|
||||
}
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
import { fetchCSV } from "macro:fetchCSV";
|
||||
|
||||
export const Covid19 = () => {
|
||||
const rows = fetchCSV("https://covid19.who.int/WHO-COVID-19-global-data.csv", {
|
||||
last: 100,
|
||||
columns: ["New_cases", "Date_reported", "Country"],
|
||||
});
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h2>Covid-19</h2>
|
||||
<h6>last {rows.length} updates from the WHO</h6>
|
||||
<div className="Table">
|
||||
<div className="Header">
|
||||
<div className="Heading">New Cases</div>
|
||||
<div className="Heading">Date</div>
|
||||
<div className="Heading">Country</div>
|
||||
</div>
|
||||
|
||||
{rows.map((row, index) => (
|
||||
<div className="Row" key={index}>
|
||||
<div className="Column">{row[0]}</div>
|
||||
<div className="Column">{row[1]}</div>
|
||||
<div className="Column">{row[2]}</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,15 +0,0 @@
|
||||
// source code
|
||||
import { matchInFile } from "macro:matchInFile";
|
||||
|
||||
export const IPAddresses = () => (
|
||||
<div>
|
||||
<h2>recent ip addresses</h2>
|
||||
<div className="Lines">
|
||||
{matchInFile("access.log", /^(?:[0-9]{1,3}\.){3}[0-9]{1,3}/).map((ipAddress, index) => (
|
||||
<div className="Line" key={index}>
|
||||
{ipAddress}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -1,15 +0,0 @@
|
||||
import * as ReactDOM from "react-dom";
|
||||
import * as React from "react";
|
||||
import { IPAddresses } from "./example";
|
||||
import { Covid19 } from "./covid19";
|
||||
|
||||
const Start = function () {
|
||||
const root = document.createElement("div");
|
||||
document.body.appendChild(root);
|
||||
|
||||
// comment out to switch between examples
|
||||
// ReactDOM.render(<IPAddresses />, root);
|
||||
ReactDOM.render(<Covid19 />, root);
|
||||
};
|
||||
|
||||
Start();
|
||||
@@ -1,4 +0,0 @@
|
||||
// source code
|
||||
import { mysteryBox } from "macro:./mystery-box";
|
||||
|
||||
export default "You roll! " + mysteryBox(123);
|
||||
@@ -1,54 +0,0 @@
|
||||
import Pappa from "papaparse";
|
||||
// Example usage:
|
||||
// const rows = fetchCSV(
|
||||
// "https://covid19.who.int/WHO-COVID-19-global-data.csv",
|
||||
// {
|
||||
// last: 100,
|
||||
// columns: ["New_cases", "Date_reported", "Country"],
|
||||
// }
|
||||
// );
|
||||
export async function fetchCSV(callExpression) {
|
||||
console.time("fetchCSV Total");
|
||||
const [
|
||||
urlNode,
|
||||
{
|
||||
properties: { last: limit = 10, columns = [] },
|
||||
},
|
||||
] = callExpression.arguments;
|
||||
const url = urlNode.get();
|
||||
|
||||
console.time("Fetch");
|
||||
const response = await fetch(url);
|
||||
const csvText = await response.text();
|
||||
console.timeEnd("Fetch");
|
||||
|
||||
console.time("Parse");
|
||||
let rows = Pappa.parse(csvText, { fastMode: true }).data;
|
||||
console.timeEnd("Parse");
|
||||
|
||||
console.time("Render");
|
||||
const columnIndices = new Array(columns.length);
|
||||
|
||||
for (let i = 0; i < columns.length; i++) {
|
||||
columnIndices[i] = rows[0].indexOf(columns[i]);
|
||||
}
|
||||
|
||||
rows = rows
|
||||
.slice(Math.max(limit, rows.length) - limit)
|
||||
.reverse()
|
||||
.filter(columns => columns.every(Boolean));
|
||||
const value = (
|
||||
<array>
|
||||
{rows.map(columns => (
|
||||
<array>
|
||||
{columnIndices.map(columnIndex => (
|
||||
<string value={columns[columnIndex]} />
|
||||
))}
|
||||
</array>
|
||||
))}
|
||||
</array>
|
||||
);
|
||||
console.timeEnd("Render");
|
||||
console.timeEnd("fetchCSV Total");
|
||||
return value;
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
// macro code
|
||||
export async function matchInFile(callExpression: BunAST.CallExpression) {
|
||||
const [filePathNode, matcherNode] = callExpression.arguments;
|
||||
let filePath: string;
|
||||
filePath = filePathNode.get();
|
||||
|
||||
let matcher: RegExp;
|
||||
matcher = matcherNode.get();
|
||||
const file: string = await Bun.file(Bun.cwd + filePath).text();
|
||||
|
||||
return (
|
||||
<array>
|
||||
{file
|
||||
.split("\n")
|
||||
.map(line => line.match(matcher))
|
||||
.filter(Boolean)
|
||||
.reverse()
|
||||
.map(line => (
|
||||
<string value={line[0]} />
|
||||
))}
|
||||
</array>
|
||||
);
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
export function mysteryBox(callExpression) {
|
||||
console.log(callExpression.log);
|
||||
// get arguments
|
||||
const [countNode] = callExpression.arguments;
|
||||
const countString: string = countNode.get();
|
||||
const count: number = parseInt(countString, 10);
|
||||
|
||||
// validate
|
||||
if (!(count >= 1 && count <= 1000)) return new Error(`Argument ${countString} is expected to be between 1 and 1000`);
|
||||
|
||||
// return a value
|
||||
return (Math.random() * count) | 0;
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
import moment from "moment";
|
||||
export function now(node) {
|
||||
var fmt = "HH:mm:ss";
|
||||
const args = node.arguments;
|
||||
if (args[0] instanceof <string />) {
|
||||
fmt = args[0].get();
|
||||
}
|
||||
const time = moment().format(fmt);
|
||||
return <string value={time}></string>;
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "macros",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"moment": "^2.29.1",
|
||||
"papaparse": "^5.3.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-refresh": "^0.10.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^17.0.24",
|
||||
"@types/react-dom": "^17.0.9"
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Macro test</title>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
|
||||
<link rel="stylesheet" href="/styles.css" type="text/css" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<script async type="module" src="/components/index.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,47 +0,0 @@
|
||||
html {
|
||||
font-size: 4rem;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
background-color: black;
|
||||
|
||||
color: rgb(0, 255, 0);
|
||||
font-family: "Courier";
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 48px auto;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.Line {
|
||||
font-size: 0.5rem;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
.Table {
|
||||
display: grid;
|
||||
width: fit-content;
|
||||
}
|
||||
|
||||
.Row,
|
||||
.Header {
|
||||
display: grid;
|
||||
grid-template-columns: 2fr 1fr 1fr;
|
||||
text-align: right;
|
||||
|
||||
column-gap: 2rem;
|
||||
}
|
||||
|
||||
.Heading {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.Header {
|
||||
border-bottom: 1px solid rgb(0, 255, 0);
|
||||
margin-bottom: 20px;
|
||||
padding-bottom: 20px;
|
||||
}
|
||||
|
||||
.Heading:nth-of-type(2) {
|
||||
text-align: left;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"paths": {},
|
||||
"jsx": "preserve"
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
const map = Bun.mmap("./mmap.txt", { shared: true });
|
||||
const utf8decoder = new TextDecoder("utf-8");
|
||||
|
||||
let old = new TextEncoder().encode("12345");
|
||||
|
||||
setInterval(() => {
|
||||
old = old.sort((a, b) => (Math.random() > 0.5 ? -1 : 1));
|
||||
console.log(`changing mmap to ~> ${utf8decoder.decode(old)}`);
|
||||
|
||||
map.set(old);
|
||||
}, 4);
|
||||
@@ -1,22 +0,0 @@
|
||||
const map = Bun.mmap("./mmap.txt");
|
||||
|
||||
function buffer_hash(buffer) {
|
||||
let hash = 0;
|
||||
for (let i = 0; i < buffer.length; i++) {
|
||||
hash = (hash << 5) - hash + buffer[i];
|
||||
hash |= 0; // Convert to 32bit integer
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
|
||||
const decoder = new TextDecoder();
|
||||
|
||||
let hash = buffer_hash(map);
|
||||
console.log(decoder.decode(map));
|
||||
|
||||
while (true) {
|
||||
if (buffer_hash(map) !== hash) {
|
||||
hash = buffer_hash(map);
|
||||
console.log(`mmap changed to ~> ${decoder.decode(map)}`);
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
43521
|
||||
@@ -1,23 +0,0 @@
|
||||
import { resolve } from "path";
|
||||
import { parse } from "querystring";
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });
|
||||
|
||||
var pathname = req.url.substring(1);
|
||||
const q = pathname.indexOf("?");
|
||||
var { editor } = parse(pathname.substring(q + 1)) || {};
|
||||
|
||||
if (q > 0) {
|
||||
pathname = pathname.substring(0, q);
|
||||
}
|
||||
|
||||
Bun.openInEditor(resolve(pathname), {
|
||||
editor,
|
||||
});
|
||||
|
||||
return new Response(`Opened ${req.url}`);
|
||||
},
|
||||
};
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"name": "simple-react",
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@emotion/css": "^11.1.3",
|
||||
"@vitejs/plugin-react-refresh": "^1.3.3",
|
||||
"antd": "^4.16.1",
|
||||
"left-pad": "^1.3.0",
|
||||
"next": "^11.0.0",
|
||||
"parcel": "2.0.0-beta.3",
|
||||
"react": "^17.0.2",
|
||||
"react-bootstrap": "^1.6.1",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-form": "^4.0.1",
|
||||
"react-hook-form": "^7.8.3"
|
||||
},
|
||||
"parcel": "parceldist/index.js",
|
||||
"targets": {
|
||||
"parcel": {
|
||||
"outputFormat": "esmodule",
|
||||
"sourceMap": false,
|
||||
"optimize": false,
|
||||
"engines": {
|
||||
"chrome": "last 1 version"
|
||||
}
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@snowpack/plugin-react-refresh": "^2.5.0",
|
||||
"typescript": "^4.3.4"
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
crossorigin="anonymous"
|
||||
href="https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@400;700&family=Space+Mono:wght@400;700&display=swap"
|
||||
/>
|
||||
</head>
|
||||
<body>
|
||||
<div id="reactroot"></div>
|
||||
<link rel="stylesheet" href="./src/index.css" />
|
||||
<script src="./src/index.tsx" async type="module"></script>
|
||||
</body>
|
||||
</html>
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,14 +0,0 @@
|
||||
:root {
|
||||
--timestamp: "0";
|
||||
--interval: "8";
|
||||
--progress-bar: 11.83299999999997%;
|
||||
--spinner-1-muted: rgb(142, 6, 182);
|
||||
--spinner-1-primary: rgb(177, 8, 227);
|
||||
--spinner-2-muted: rgb(110, 148, 190);
|
||||
--spinner-2-primary: rgb(138, 185, 238);
|
||||
--spinner-3-muted: rgb(75, 45, 64);
|
||||
--spinner-3-primary: rgb(94, 56, 80);
|
||||
--spinner-4-muted: rgb(155, 129, 108);
|
||||
--spinner-4-primary: rgb(194, 161, 135);
|
||||
--spinner-rotate: 213deg;
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
import React from "react";
|
||||
|
||||
export function RenderCounter({ name, children }) {
|
||||
const counter = React.useRef(1);
|
||||
return (
|
||||
<div className="RenderCounter">
|
||||
<div className="RenderCounter-meta">
|
||||
<div className="RenderCounter-title">
|
||||
{name} rendered <strong>{counter.current++} times</strong>
|
||||
</div>
|
||||
<div className="RenderCounter-lastRender">
|
||||
LAST RENDER:{" "}
|
||||
{new Intl.DateTimeFormat([], {
|
||||
timeStyle: "long",
|
||||
}).format(new Date())}
|
||||
</div>
|
||||
</div>
|
||||
<div className="RenderCounter-children">{children}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
import * as React from "react";
|
||||
import { Button } from "./Button";
|
||||
import { RenderCounter } from "./RenderCounter";
|
||||
export function App() {
|
||||
return (
|
||||
<RenderCounter name="App">
|
||||
<div className="AppRoot">
|
||||
<h1>This is the root element</h1>
|
||||
|
||||
<Button>Click</Button>
|
||||
</div>
|
||||
</RenderCounter>
|
||||
);
|
||||
}
|
||||
@@ -1,9 +0,0 @@
|
||||
import { RenderCounter } from "./RenderCounter";
|
||||
|
||||
export const Button = ({ children }) => {
|
||||
return (
|
||||
<RenderCounter name="Button">
|
||||
<div className="Button">{children}</div>
|
||||
</RenderCounter>
|
||||
);
|
||||
};
|
||||
@@ -1 +0,0 @@
|
||||
@import "https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@400;700&family=Space+Mono:wght@400;700&display=swap";
|
||||
@@ -1,98 +0,0 @@
|
||||
@import "./colors.css";
|
||||
|
||||
:root {
|
||||
--heading-font: "Space Mono", system-ui;
|
||||
--body-font: "IBM Plex Sans", system-ui;
|
||||
|
||||
--color-brand: #02ff00;
|
||||
--color-brand-muted: rgb(2, 150, 0);
|
||||
|
||||
--padding-horizontal: 90px;
|
||||
|
||||
--page-background: black;
|
||||
--page-background-alpha: rgba(0, 0, 0, 0.8);
|
||||
|
||||
--result__background-color: black;
|
||||
--result__primary-color: var(--color-brand);
|
||||
--result__foreground-color: white;
|
||||
--result__muted-color: rgb(165, 165, 165);
|
||||
|
||||
--card-width: 352px;
|
||||
|
||||
--page-width: 1152px;
|
||||
|
||||
--snippets_container-background-unfocused: #171717;
|
||||
--snippets_container-background-focused: #0017e9;
|
||||
--snippets_container-background: var(
|
||||
--snippets_container-background-unfocused
|
||||
);
|
||||
--snippets_container-muted-color: rgb(153, 153, 153);
|
||||
}
|
||||
|
||||
body {
|
||||
color: white;
|
||||
margin: 0;
|
||||
|
||||
padding: 0;
|
||||
font-family: var(--body-font);
|
||||
background-color: var(--page-background);
|
||||
color: var(--result__muted-color);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#reactroot,
|
||||
#__next,
|
||||
body,
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.RenderCounter {
|
||||
border: 10px solid var(--snippets_container-background-focused);
|
||||
margin: 10px;
|
||||
padding: 10px;
|
||||
animation: flash 0.2s linear;
|
||||
animation-fill-mode: forwards;
|
||||
}
|
||||
|
||||
.RenderCounter-meta {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
margin: -10px;
|
||||
padding: 10px;
|
||||
background-color: #111;
|
||||
}
|
||||
|
||||
.RenderCounter-lastRender,
|
||||
.RenderCounter-title {
|
||||
white-space: nowrap;
|
||||
color: rgb(153, 153, 153);
|
||||
}
|
||||
|
||||
@keyframes flash {
|
||||
from {
|
||||
border-color: var(--snippets_container-background-focused);
|
||||
}
|
||||
|
||||
to {
|
||||
border-color: var(--snippets_container-background-unfocused);
|
||||
}
|
||||
}
|
||||
|
||||
.Button {
|
||||
display: block;
|
||||
|
||||
border: 1px solid rgb(20, 180, 0);
|
||||
background-color: rgb(2, 150, 0);
|
||||
color: white;
|
||||
font-weight: 500;
|
||||
padding: 10px 12px;
|
||||
border-radius: 4px;
|
||||
text-transform: uppercase;
|
||||
text-align: center;
|
||||
width: fit-content;
|
||||
cursor: pointer;
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import ReactDOM from "react-dom";
|
||||
import React from "react";
|
||||
import { App } from "./components/app";
|
||||
import classNames from "classnames";
|
||||
|
||||
function startReact() {
|
||||
ReactDOM.render(<App />, document.querySelector("#reactroot"));
|
||||
}
|
||||
|
||||
globalThis.addEventListener("DOMContentLoaded", () => {
|
||||
startReact();
|
||||
});
|
||||
startReact();
|
||||
|
||||
export { App };
|
||||
@@ -1,62 +0,0 @@
|
||||
import React from "react";
|
||||
|
||||
export const Main = ({ productName }) => {
|
||||
return (
|
||||
<>
|
||||
<header>
|
||||
<div className="Title">CSS HMR Stress Test</div>
|
||||
<p className="Description">
|
||||
This page visually tests how quickly a bundler can update CSS over Hot Module Reloading.
|
||||
</p>
|
||||
</header>
|
||||
<main className="main">
|
||||
<section className="ProgressSection">
|
||||
<p className="Subtitle">
|
||||
<span className="Subtitle-part">
|
||||
Ran: <span className="timer"></span>
|
||||
</span>
|
||||
</p>
|
||||
|
||||
<div className="ProgressBar-container">
|
||||
<div className="ProgressBar"></div>
|
||||
</div>
|
||||
<div className="SectionLabel">The progress bar should move from left to right smoothly.</div>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<div className="Spinners">
|
||||
<div className="Spinner-container Spinner-1">
|
||||
<div className="Spinner"></div>
|
||||
</div>
|
||||
|
||||
<div className="Spinner-container Spinner-2">
|
||||
<div className="Spinner"></div>
|
||||
</div>
|
||||
|
||||
<div className="Spinner-container Spinner-3">
|
||||
<div className="Spinner"></div>
|
||||
</div>
|
||||
|
||||
<div className="Spinner-container Spinner-4">
|
||||
<div className="Spinner"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="SectionLabel">The spinners should rotate & change color smoothly.</div>
|
||||
</section>
|
||||
</main>
|
||||
<footer>
|
||||
<div className="SectionLabel FooterLabel">There are no CSS animations on this page.</div>
|
||||
|
||||
<div className="Bundler-container">
|
||||
<div className="Bundler">{productName}</div>
|
||||
<div className="Bundler-updateRate">
|
||||
Saving a css file every
|
||||
<span className="highlight">
|
||||
<span className="interval"></span>ms
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</footer>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -1,29 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "esnext",
|
||||
"lib": [
|
||||
"dom",
|
||||
"dom.iterable",
|
||||
"esnext"
|
||||
],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"strict": false,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"noEmit": true,
|
||||
"esModuleInterop": true,
|
||||
"module": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"jsx": "preserve"
|
||||
},
|
||||
"include": [
|
||||
"next-env.d.ts",
|
||||
"**/*.ts",
|
||||
"**/*.tsx"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
Binary file not shown.
@@ -1,20 +0,0 @@
|
||||
// A simple way to connect FileSystemRouter to Bun#serve
|
||||
// run with `bun run index.tsx`
|
||||
|
||||
import { renderToReadableStream } from "react-dom/server";
|
||||
import { FileSystemRouter } from "bun";
|
||||
|
||||
export default {
|
||||
port: 3000,
|
||||
async fetch(request: Request) {
|
||||
const router = new FileSystemRouter({
|
||||
dir: process.cwd() + "/pages",
|
||||
style: "nextjs",
|
||||
});
|
||||
|
||||
const route = router.match(request);
|
||||
|
||||
const { default: Root } = await import(route.filePath!);
|
||||
return new Response(await renderToReadableStream(<Root {...route.params} />));
|
||||
},
|
||||
};
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"name": "react-routes",
|
||||
"module": "index.tsx",
|
||||
"type": "module",
|
||||
"devDependencies": {
|
||||
"@types/react": "^18.0.27",
|
||||
"@types/react-dom": "^18.0.10",
|
||||
"bun-types": "^0.7.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
}
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
// reachable from http://localhost:3000/
|
||||
|
||||
export default () => (
|
||||
<html>
|
||||
<head>
|
||||
<title>index</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>
|
||||
<a href="/one">one</a>
|
||||
</h1>
|
||||
<h1>
|
||||
<a href="/two">two</a>
|
||||
</h1>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
@@ -1,12 +0,0 @@
|
||||
// reachable from http://localhost:3000/one
|
||||
|
||||
export default () => (
|
||||
<html>
|
||||
<head>
|
||||
<title>one</title>
|
||||
</head>
|
||||
<body>
|
||||
<p>one</p>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
@@ -1,12 +0,0 @@
|
||||
// reachable from http://localhost:3000/two
|
||||
|
||||
export default () => (
|
||||
<html>
|
||||
<head>
|
||||
<title>two</title>
|
||||
</head>
|
||||
<body>
|
||||
<p>two</p>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
@@ -1,20 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": [
|
||||
"ESNext"
|
||||
],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "nodenext",
|
||||
"strict": false,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
"jsx": "preserve",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"allowJs": true,
|
||||
"types": [
|
||||
"bun-types" // add Bun global
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
import { SHA1, SHA256, SHA512, SHA384, SHA512_256, MD5, MD4, RIPEMD160, sha } from "bun";
|
||||
|
||||
const input = "Hello World";
|
||||
const [first, second] = input.split(" ");
|
||||
|
||||
const log = (name, ...args) => console.log(`${name}:`.padStart("SHA512_256: ".length), ...args);
|
||||
|
||||
console.log("");
|
||||
// This is SHA512-256:
|
||||
// This function is shorthand for SHA512_256.hash(input)
|
||||
log("Bun.sha()", sha(input, "base64"));
|
||||
|
||||
log("SHA1", SHA1.hash(input, "hex"));
|
||||
log("SHA256", SHA256.hash(input, "hex"));
|
||||
log("SHA384", SHA384.hash(input, "hex"));
|
||||
log("SHA512", SHA512.hash(input, "hex"));
|
||||
log("SHA512_256", SHA512_256.hash(input, "hex"));
|
||||
log("RIPEMD160", RIPEMD160.hash(input, "hex"));
|
||||
|
||||
console.log("");
|
||||
console.log("---- Chunked ----");
|
||||
console.log("");
|
||||
|
||||
// You can also do updates in chunks:
|
||||
// const hash = new Hash();
|
||||
for (let Hash of [SHA1, SHA256, SHA384, SHA512, SHA512_256, RIPEMD160]) {
|
||||
const hash = new Hash();
|
||||
hash.update(first);
|
||||
hash.update(" " + second);
|
||||
log(Hash.name, hash.digest("hex"));
|
||||
}
|
||||
|
||||
console.log("");
|
||||
console.log("---- Base64 ----");
|
||||
console.log("");
|
||||
|
||||
// base64 or hex
|
||||
for (let Hash of [SHA1, SHA256, SHA384, SHA512, SHA512_256]) {
|
||||
const hash = new Hash();
|
||||
hash.update(first);
|
||||
hash.update(" " + second);
|
||||
log(Hash.name, hash.digest("base64"));
|
||||
}
|
||||
|
||||
console.log("");
|
||||
console.log("---- Uint8Array ----");
|
||||
console.log("");
|
||||
|
||||
// Uint8Array by default
|
||||
for (let Hash of [SHA1, SHA256, SHA384, SHA512, SHA512_256]) {
|
||||
const hash = new Hash();
|
||||
hash.update(first);
|
||||
hash.update(" " + second);
|
||||
log(Hash.name, hash.digest());
|
||||
}
|
||||
|
||||
console.log("");
|
||||
console.log("---- Uint8Array can be updated in-place ----");
|
||||
console.log("");
|
||||
|
||||
var oneBuf = new Uint8Array(1024);
|
||||
// Update Uint8Array in-place instead of allocating a new one
|
||||
for (let Hash of [SHA1, SHA256, SHA384, SHA512, SHA512_256]) {
|
||||
const hash = new Hash();
|
||||
hash.update(first);
|
||||
hash.update(" " + second);
|
||||
log(Hash.name, hash.digest(oneBuf).subarray(0, Hash.byteLength));
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
import { spawn, which } from "bun";
|
||||
import { rmSync } from "fs";
|
||||
import { basename } from "path";
|
||||
|
||||
const repo = process.argv.at(3) || "TheoBr/vercel-vite-demo";
|
||||
|
||||
const target = basename(repo) + "-main";
|
||||
console.log("Downloading", repo, "to", "/tmp/" + target);
|
||||
|
||||
const archive = await fetch(`https://github.com/${repo}/archive/refs/heads/main.tar.gz`);
|
||||
|
||||
// remove the directory if it already exists locally
|
||||
rmSync("/tmp/" + target, { recursive: true, force: true });
|
||||
|
||||
const tar = spawn({
|
||||
cmd: ["tar", "-xzf", "-"],
|
||||
stdin: archive.body,
|
||||
|
||||
stderr: "inherit",
|
||||
stdout: "inherit",
|
||||
cwd: "/tmp",
|
||||
});
|
||||
|
||||
await tar.exited;
|
||||
|
||||
// if vercel isn't installed, install it
|
||||
if (!which("vercel")) {
|
||||
console.log("Installing vercel...");
|
||||
|
||||
const installer = spawn(["bun", "install", "-g", "vercel"], {
|
||||
stderr: "inherit",
|
||||
stdout: "inherit",
|
||||
stdin: "inherit",
|
||||
});
|
||||
await installer.exited;
|
||||
|
||||
if (!which("vercel")) {
|
||||
throw new Error("Failed to install Vercel CLI");
|
||||
}
|
||||
}
|
||||
|
||||
const { exited: deployed } = spawn({
|
||||
cmd: ["vercel", "deploy", "--yes", "--public", target],
|
||||
stdio: ["inherit", "inherit", "inherit"],
|
||||
cwd: "/tmp",
|
||||
});
|
||||
|
||||
await deployed;
|
||||
@@ -1,15 +0,0 @@
|
||||
import { resolve } from "path";
|
||||
import type { ServeOptions } from "bun";
|
||||
|
||||
const development = process.env.NODE_ENV !== "production";
|
||||
export default {
|
||||
fetch(req: Request) {
|
||||
return new Response(Bun.file(resolve(req.url.substring(1))));
|
||||
},
|
||||
|
||||
// hostname: "0.0.0.0",
|
||||
port: process.env.PORT || "443",
|
||||
keyFile: process.env.SSL_KEY_FILE || "./key.pem",
|
||||
certFile: process.env.SSL_CERTIFICATE_FILE || "./cert.pem",
|
||||
development,
|
||||
} as ServeOptions;
|
||||
@@ -1,49 +0,0 @@
|
||||
import { listen, connect } from "bun";
|
||||
|
||||
var counter = 0;
|
||||
const msg = Buffer.from("Hello World!");
|
||||
|
||||
const handlers = {
|
||||
open(socket) {
|
||||
if (!socket.data?.isServer) {
|
||||
if (!socket.write(msg)) {
|
||||
socket.data = { pending: msg };
|
||||
}
|
||||
}
|
||||
},
|
||||
data(socket, buffer) {
|
||||
if (!socket.write(buffer)) {
|
||||
socket.data = { pending: buffer };
|
||||
return;
|
||||
}
|
||||
counter++;
|
||||
},
|
||||
drain(socket) {
|
||||
const pending = socket.data?.pending;
|
||||
if (!pending) return;
|
||||
if (socket.write(pending)) {
|
||||
socket.data = undefined;
|
||||
counter++;
|
||||
return;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
setInterval(() => {
|
||||
console.log("Wrote", counter, "messages");
|
||||
counter = 0;
|
||||
}, 1000);
|
||||
|
||||
const server = listen({
|
||||
socket: handlers,
|
||||
hostname: "localhost",
|
||||
port: 8080,
|
||||
data: {
|
||||
isServer: true,
|
||||
},
|
||||
});
|
||||
const connection = await connect({
|
||||
socket: handlers,
|
||||
hostname: "localhost",
|
||||
port: 8080,
|
||||
});
|
||||
@@ -686,7 +686,7 @@ def add(debugger, *, category, regex=False, type, identifier=None, synth=False,
|
||||
if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary == True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
|
||||
if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))
|
||||
|
||||
def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
|
||||
def MultiArrayList_Entry(type): return 'multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
|
||||
|
||||
def __lldb_init_module(debugger, _=None):
|
||||
# Initialize Zig Categories
|
||||
@@ -701,8 +701,8 @@ def __lldb_init_module(debugger, _=None):
|
||||
# Initialize Zig Standard Library
|
||||
add(debugger, category='zig.std', type='mem.Allocator', summary='${var.ptr}')
|
||||
add(debugger, category='zig.std', regex=True, type='^segmented_list\\.SegmentedList\\(.*\\)$', identifier='std_SegmentedList', synth=True, expand=True, summary='len=${var.len}')
|
||||
add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
|
||||
add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
|
||||
add(debugger, category='zig.std', regex=True, type='multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
|
||||
add(debugger, category='zig.std', regex=True, type='multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
|
||||
add(debugger, category='zig.std', regex=True, type=MultiArrayList_Entry('.*'), identifier='std_Entry', synth=True, inline_children=True, summary=True)
|
||||
add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)$', identifier='std_HashMapUnmanaged', synth=True, expand=True, summary=True)
|
||||
add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)\\.Entry$', identifier = 'std_Entry', synth=True, inline_children=True, summary=True)
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "^5.7.2"
|
||||
"typescript": "5.9.2"
|
||||
},
|
||||
"resolutions": {
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
"name": "bun-plugin-svelte",
|
||||
"devDependencies": {
|
||||
"@threlte/core": "8.0.1",
|
||||
"bun-types": "canary",
|
||||
"@types/bun": "../bun-types",
|
||||
"svelte": "^5.20.4",
|
||||
},
|
||||
"peerDependencies": {
|
||||
@@ -28,11 +28,13 @@
|
||||
|
||||
"@threlte/core": ["@threlte/core@8.0.1", "", { "dependencies": { "mitt": "^3.0.1" }, "peerDependencies": { "svelte": ">=5", "three": ">=0.155" } }, "sha512-vy1xRQppJFNmfPTeiRQue+KmYFsbPgVhwuYXRTvVrwPeD2oYz43gxUeOpe1FACeGKxrxZykeKJF5ebVvl7gBxw=="],
|
||||
|
||||
"@types/bun": ["bun-types@file:../bun-types", { "dependencies": { "@types/node": "*" }, "devDependencies": { "@types/react": "^19" }, "peerDependencies": { "@types/react": "^19" } }],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="],
|
||||
|
||||
"@types/node": ["@types/node@22.13.5", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg=="],
|
||||
|
||||
"@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="],
|
||||
"@types/react": ["@types/react@19.1.9", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-WmdoynAX8Stew/36uTSVMcLJJ1KRh6L3IZRx1PZ7qJtBqT3dYTgyDTx8H1qoRghErydW7xw9mSJ3wS//tCRpFA=="],
|
||||
|
||||
"acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
|
||||
|
||||
@@ -42,10 +44,10 @@
|
||||
|
||||
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.2.4-canary.20250226T140704", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-P8b2CGLtbvi/kQ4dPHBhU5qkguIjHMYCjNqjWDTKSnodWDTbcv9reBdktZJ7m5SF4m15JLthfFq2PtwKpA9a+w=="],
|
||||
|
||||
"clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"esm-env": ["esm-env@1.2.2", "", {}, "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA=="],
|
||||
|
||||
"esrap": ["esrap@1.4.5", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-CjNMjkBWWZeHn+VX+gS8YvFwJ5+NDhg8aWZBSFJPR8qQduDNjbJodA2WcwCm7uQa5Rjqj+nZvVmceg1RbHFB9g=="],
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
"build:types": "tsc --emitDeclarationOnly --declaration --declarationDir ./dist"
|
||||
},
|
||||
"devDependencies": {
|
||||
"bun-types": "canary",
|
||||
"@types/bun": "../bun-types",
|
||||
"svelte": "^5.20.4",
|
||||
"@threlte/core": "8.0.1"
|
||||
},
|
||||
|
||||
134
packages/bun-types/bun.d.ts
vendored
134
packages/bun-types/bun.d.ts
vendored
@@ -21,7 +21,7 @@ declare module "bun" {
|
||||
| DataView<TArrayBuffer>;
|
||||
type BufferSource = NodeJS.TypedArray | DataView | ArrayBufferLike;
|
||||
type StringOrBuffer = string | NodeJS.TypedArray | ArrayBufferLike;
|
||||
type XMLHttpRequestBodyInit = Blob | BufferSource | string | FormData | Iterable<Uint8Array>;
|
||||
type XMLHttpRequestBodyInit = Blob | BufferSource | FormData | URLSearchParams | string;
|
||||
type ReadableStreamController<T> = ReadableStreamDefaultController<T>;
|
||||
type ReadableStreamDefaultReadResult<T> =
|
||||
| ReadableStreamDefaultReadValueResult<T>
|
||||
@@ -826,7 +826,7 @@ declare module "bun" {
|
||||
buffers: Array<ArrayBufferView | ArrayBufferLike>,
|
||||
maxLength: number,
|
||||
asUint8Array: true,
|
||||
): Uint8Array;
|
||||
): Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
@@ -843,35 +843,6 @@ declare module "bun" {
|
||||
stream: ReadableStream<ArrayBufferView | ArrayBufferLike>,
|
||||
): Promise<ArrayBuffer> | ArrayBuffer;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single {@link ArrayBuffer}.
|
||||
*
|
||||
* Each chunk must be a TypedArray or an ArrayBuffer. If you need to support
|
||||
* chunks of different types, consider {@link readableStreamToBlob}
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks or the concatenated chunks as a {@link Uint8Array}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.bytes}
|
||||
*/
|
||||
function readableStreamToBytes(
|
||||
stream: ReadableStream<ArrayBufferView | ArrayBufferLike>,
|
||||
): Promise<Uint8Array> | Uint8Array;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single {@link Blob}.
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link Blob}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.blob}
|
||||
*/
|
||||
function readableStreamToBlob(stream: ReadableStream): Promise<Blob>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
@@ -904,30 +875,6 @@ declare module "bun" {
|
||||
multipartBoundaryExcludingDashes?: string | NodeJS.TypedArray | ArrayBufferView,
|
||||
): Promise<FormData>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single string. Chunks must be a TypedArray or an ArrayBuffer. If you need to support chunks of different types, consider {@link readableStreamToBlob}.
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link String}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.text}
|
||||
*/
|
||||
function readableStreamToText(stream: ReadableStream): Promise<string>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single string and parse as JSON. Chunks must be a TypedArray or an ArrayBuffer. If you need to support chunks of different types, consider {@link readableStreamToBlob}.
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link String}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.json}
|
||||
*/
|
||||
function readableStreamToJSON(stream: ReadableStream): Promise<any>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
@@ -1027,8 +974,8 @@ declare module "bun" {
|
||||
*
|
||||
* This API might change later to separate Uint8ArraySink and ArrayBufferSink
|
||||
*/
|
||||
flush(): number | Uint8Array | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array;
|
||||
flush(): number | Uint8Array<ArrayBuffer> | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array<ArrayBuffer>;
|
||||
}
|
||||
|
||||
/** DNS Related APIs */
|
||||
@@ -1595,13 +1542,18 @@ declare module "bun" {
|
||||
* Executes a SQL query using template literals
|
||||
* @example
|
||||
* ```ts
|
||||
* const [user] = await sql`select * from users where id = ${1}`;
|
||||
* const [user] = await sql<Users[]>`select * from users where id = ${1}`;
|
||||
* ```
|
||||
*/
|
||||
<T = any>(strings: TemplateStringsArray, ...values: unknown[]): SQL.Query<T>;
|
||||
|
||||
/**
|
||||
* Execute a SQL query using a string
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const users = await sql<User[]>`SELECT * FROM users WHERE id = ${1}`;
|
||||
* ```
|
||||
*/
|
||||
<T = any>(string: string): SQL.Query<T>;
|
||||
|
||||
@@ -1620,6 +1572,23 @@ declare module "bun" {
|
||||
* const result = await sql`insert into users ${sql(user)} returning *`;
|
||||
* ```
|
||||
*/
|
||||
<T extends { [Key in PropertyKey]: unknown }>(obj: T | T[] | readonly T[]): SQL.Helper<T>;
|
||||
|
||||
/**
|
||||
* Helper function for inserting an object into a query, supporting specific columns
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Insert an object
|
||||
* const result = await sql`insert into users ${sql(users)} returning *`;
|
||||
*
|
||||
* // Or pick specific columns
|
||||
* const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
|
||||
*
|
||||
* // Or a single object
|
||||
* const result = await sql`insert into users ${sql(user)} returning *`;
|
||||
* ```
|
||||
*/
|
||||
<T extends { [Key in PropertyKey]: unknown }, Keys extends keyof T = keyof T>(
|
||||
obj: T | T[] | readonly T[],
|
||||
...columns: readonly Keys[]
|
||||
@@ -3682,7 +3651,7 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* If set, the HTTP server will listen on a unix socket instead of a port.
|
||||
* (Cannot be used with hostname+port)
|
||||
* (Cannot use unix with port + hostname)
|
||||
*/
|
||||
unix?: never;
|
||||
|
||||
@@ -3705,9 +3674,21 @@ declare module "bun" {
|
||||
interface UnixServeOptions extends GenericServeOptions {
|
||||
/**
|
||||
* If set, the HTTP server will listen on a unix socket instead of a port.
|
||||
* (Cannot be used with hostname+port)
|
||||
*/
|
||||
unix: string;
|
||||
|
||||
/**
|
||||
* If set, the HTTP server will listen on this port
|
||||
* (Cannot use port with unix)
|
||||
*/
|
||||
port?: never;
|
||||
|
||||
/**
|
||||
* If set, the HTTP server will listen on this hostname
|
||||
* (Cannot use hostname with unix)
|
||||
*/
|
||||
hostname?: never;
|
||||
|
||||
/**
|
||||
* Handle HTTP requests
|
||||
*
|
||||
@@ -4635,7 +4616,7 @@ declare module "bun" {
|
||||
*
|
||||
* @param path The path to the file as a byte buffer (the buffer is copied) if the path starts with `s3://` it will behave like {@link S3File}
|
||||
*/
|
||||
function file(path: ArrayBufferLike | Uint8Array, options?: BlobPropertyBag): BunFile;
|
||||
function file(path: ArrayBufferLike | Uint8Array<ArrayBuffer>, options?: BlobPropertyBag): BunFile;
|
||||
|
||||
/**
|
||||
* [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) powered by the fastest system calls available for operating on files.
|
||||
@@ -4658,7 +4639,7 @@ declare module "bun" {
|
||||
*
|
||||
* This can be 3.5x faster than `new Uint8Array(size)`, but if you send uninitialized memory to your users (even unintentionally), it can potentially leak anything recently in memory.
|
||||
*/
|
||||
function allocUnsafe(size: number): Uint8Array;
|
||||
function allocUnsafe(size: number): Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Options for `Bun.inspect`
|
||||
@@ -4941,7 +4922,7 @@ declare module "bun" {
|
||||
*
|
||||
* To close the file, set the array to `null` and it will be garbage collected eventually.
|
||||
*/
|
||||
function mmap(path: PathLike, opts?: MMapOptions): Uint8Array;
|
||||
function mmap(path: PathLike, opts?: MMapOptions): Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Write to stdout
|
||||
@@ -4971,8 +4952,8 @@ declare module "bun" {
|
||||
| { r: number; g: number; b: number; a?: number }
|
||||
| [number, number, number]
|
||||
| [number, number, number, number]
|
||||
| Uint8Array
|
||||
| Uint8ClampedArray
|
||||
| Uint8Array<ArrayBuffer>
|
||||
| Uint8ClampedArray<ArrayBuffer>
|
||||
| Float32Array
|
||||
| Float64Array
|
||||
| string
|
||||
@@ -5095,7 +5076,7 @@ declare module "bun" {
|
||||
*
|
||||
* **The input buffer must not be garbage collected**. That means you will need to hold on to it for the duration of the string's lifetime.
|
||||
*/
|
||||
function arrayBufferToString(buffer: Uint8Array | ArrayBufferLike): string;
|
||||
function arrayBufferToString(buffer: Uint8Array<ArrayBuffer> | ArrayBufferLike): string;
|
||||
|
||||
/**
|
||||
* Cast bytes to a `String` without copying. This is the fastest way to get a `String` from a `Uint16Array`
|
||||
@@ -5644,9 +5625,9 @@ declare module "bun" {
|
||||
* @returns The output buffer with the compressed data
|
||||
*/
|
||||
function deflateSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
data: Uint8Array<ArrayBuffer> | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
): Uint8Array<ArrayBuffer>;
|
||||
/**
|
||||
* Compresses a chunk of data with `zlib` GZIP algorithm.
|
||||
* @param data The buffer of data to compress
|
||||
@@ -5654,27 +5635,27 @@ declare module "bun" {
|
||||
* @returns The output buffer with the compressed data
|
||||
*/
|
||||
function gzipSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
data: Uint8Array<ArrayBuffer> | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
): Uint8Array<ArrayBuffer>;
|
||||
/**
|
||||
* Decompresses a chunk of data with `zlib` INFLATE algorithm.
|
||||
* @param data The buffer of data to decompress
|
||||
* @returns The output buffer with the decompressed data
|
||||
*/
|
||||
function inflateSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
data: Uint8Array<ArrayBuffer> | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
): Uint8Array<ArrayBuffer>;
|
||||
/**
|
||||
* Decompresses a chunk of data with `zlib` GUNZIP algorithm.
|
||||
* @param data The buffer of data to decompress
|
||||
* @returns The output buffer with the decompressed data
|
||||
*/
|
||||
function gunzipSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
data: Uint8Array<ArrayBuffer> | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
): Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
|
||||
@@ -6651,7 +6632,7 @@ declare module "bun" {
|
||||
interface BinaryTypeList {
|
||||
arraybuffer: ArrayBuffer;
|
||||
buffer: Buffer;
|
||||
uint8array: Uint8Array;
|
||||
uint8array: Uint8Array<ArrayBuffer>;
|
||||
// TODO: DataView
|
||||
// dataview: DataView;
|
||||
}
|
||||
@@ -6829,6 +6810,7 @@ declare module "bun" {
|
||||
* The unix socket to listen on or connect to
|
||||
*/
|
||||
unix: string;
|
||||
|
||||
/**
|
||||
* TLS Configuration with which to create the socket
|
||||
*/
|
||||
@@ -7223,7 +7205,7 @@ declare module "bun" {
|
||||
}
|
||||
|
||||
type ReadableToIO<X extends Readable> = X extends "pipe" | undefined
|
||||
? ReadableStream<Uint8Array>
|
||||
? ReadableStream<Uint8Array<ArrayBuffer>>
|
||||
: X extends BunFile | ArrayBufferView | number
|
||||
? number
|
||||
: undefined;
|
||||
|
||||
58
packages/bun-types/deprecated.d.ts
vendored
58
packages/bun-types/deprecated.d.ts
vendored
@@ -1,4 +1,57 @@
|
||||
declare module "bun" {
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single {@link ArrayBuffer}.
|
||||
*
|
||||
* Each chunk must be a TypedArray or an ArrayBuffer. If you need to support
|
||||
* chunks of different types, consider {@link readableStreamToBlob}
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks or the concatenated chunks as a {@link Uint8Array}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.bytes}
|
||||
*/
|
||||
function readableStreamToBytes(
|
||||
stream: ReadableStream<ArrayBufferView | ArrayBufferLike>,
|
||||
): Promise<Uint8Array<ArrayBuffer>> | Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single {@link Blob}.
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link Blob}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.blob}
|
||||
*/
|
||||
function readableStreamToBlob(stream: ReadableStream): Promise<Blob>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single string. Chunks must be a TypedArray or an ArrayBuffer. If you need to support chunks of different types, consider {@link readableStreamToBlob}.
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link String}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.text}
|
||||
*/
|
||||
function readableStreamToText(stream: ReadableStream): Promise<string>;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
* Concatenate the chunks into a single string and parse as JSON. Chunks must be a TypedArray or an ArrayBuffer. If you need to support chunks of different types, consider {@link readableStreamToBlob}.
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link String}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.json}
|
||||
*/
|
||||
function readableStreamToJSON(stream: ReadableStream): Promise<any>;
|
||||
|
||||
interface BunMessageEvent<T> {
|
||||
/**
|
||||
* @deprecated
|
||||
@@ -31,6 +84,9 @@ declare module "bun" {
|
||||
*/
|
||||
type Errorlike = ErrorLike;
|
||||
|
||||
/** @deprecated This is unused in Bun's types and may be removed in the future */
|
||||
type ShellFunction = (input: Uint8Array<ArrayBuffer>) => Uint8Array<ArrayBuffer>;
|
||||
|
||||
interface TLSOptions {
|
||||
/**
|
||||
* File path to a TLS key
|
||||
@@ -59,7 +115,7 @@ declare module "bun" {
|
||||
}
|
||||
|
||||
/** @deprecated This type is unused in Bun's declarations and may be removed in the future */
|
||||
type ReadableIO = ReadableStream<Uint8Array> | number | undefined;
|
||||
type ReadableIO = ReadableStream<Uint8Array<ArrayBuffer>> | number | undefined;
|
||||
}
|
||||
|
||||
declare namespace NodeJS {
|
||||
|
||||
22
packages/bun-types/fetch.d.ts
vendored
22
packages/bun-types/fetch.d.ts
vendored
@@ -1,19 +1,21 @@
|
||||
/*
|
||||
|
||||
This file does not declare any global types.
|
||||
|
||||
That should only happen in [./globals.d.ts](./globals.d.ts)
|
||||
so that our documentation generator can pick it up, as it
|
||||
expects all globals to be declared in one file.
|
||||
|
||||
* This file does not declare any global types.
|
||||
*
|
||||
* That should only happen in [./globals.d.ts](./globals.d.ts)
|
||||
* so that our documentation generator can pick it up, as it
|
||||
* expects all globals to be declared in one file.
|
||||
*
|
||||
* This may change in the future, which would be
|
||||
* a nice thing as it would allow us to split up
|
||||
* relevant types into their own files.
|
||||
*/
|
||||
|
||||
declare module "bun" {
|
||||
type HeadersInit = string[][] | Record<string, string | ReadonlyArray<string>> | Headers;
|
||||
type BodyInit =
|
||||
| ReadableStream
|
||||
| Bun.XMLHttpRequestBodyInit
|
||||
| URLSearchParams
|
||||
// Extras that Bun supports:
|
||||
| AsyncIterable<string | ArrayBuffer | ArrayBufferView>
|
||||
| AsyncGenerator<string | ArrayBuffer | ArrayBufferView>
|
||||
| (() => AsyncGenerator<string | ArrayBuffer | ArrayBufferView>);
|
||||
|
||||
@@ -26,7 +28,7 @@ declare module "bun" {
|
||||
? {}
|
||||
: Omit<import("undici-types").RequestInit, "body" | "headers"> & {
|
||||
body?: Bun.BodyInit | null | undefined;
|
||||
headers?: Bun.HeadersInit;
|
||||
headers?: Bun.HeadersInit | undefined;
|
||||
};
|
||||
|
||||
interface BunHeadersOverride extends LibOrFallbackHeaders {
|
||||
|
||||
12
packages/bun-types/globals.d.ts
vendored
12
packages/bun-types/globals.d.ts
vendored
@@ -999,6 +999,7 @@ interface ArrayBuffer {
|
||||
* Read-only. The length of the ArrayBuffer (in bytes).
|
||||
*/
|
||||
readonly byteLength: number;
|
||||
|
||||
/**
|
||||
* Resize an ArrayBuffer in-place.
|
||||
*/
|
||||
@@ -1008,7 +1009,6 @@ interface ArrayBuffer {
|
||||
* Returns a section of an ArrayBuffer.
|
||||
*/
|
||||
slice(begin: number, end?: number): ArrayBuffer;
|
||||
readonly [Symbol.toStringTag]: string;
|
||||
}
|
||||
|
||||
interface SharedArrayBuffer {
|
||||
@@ -1284,7 +1284,7 @@ interface ImportMeta {
|
||||
* )
|
||||
* ```
|
||||
*/
|
||||
readonly main: boolean;
|
||||
main: boolean;
|
||||
|
||||
/** Alias of `import.meta.dir`. Exists for Node.js compatibility */
|
||||
dirname: string;
|
||||
@@ -1426,12 +1426,12 @@ interface Blob {
|
||||
/**
|
||||
* Returns a promise that resolves to the contents of the blob as a Uint8Array (array of bytes) its the same as `new Uint8Array(await blob.arrayBuffer())`
|
||||
*/
|
||||
bytes(): Promise<Uint8Array>;
|
||||
bytes(): Promise<Uint8Array<ArrayBuffer>>;
|
||||
|
||||
/**
|
||||
* Returns a readable stream of the blob's contents
|
||||
*/
|
||||
stream(): ReadableStream<Uint8Array>;
|
||||
stream(): ReadableStream<Uint8Array<ArrayBuffer>>;
|
||||
}
|
||||
|
||||
declare var Blob: Bun.__internal.UseLibDomIfAvailable<
|
||||
@@ -1506,14 +1506,14 @@ interface Uint8ArrayConstructor {
|
||||
alphabet?: "base64" | "base64url";
|
||||
lastChunkHandling?: "loose" | "strict" | "stop-before-partial";
|
||||
},
|
||||
): Uint8Array;
|
||||
): Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Create a new Uint8Array from a hex encoded string
|
||||
* @param hex The hex encoded string to convert to a Uint8Array
|
||||
* @returns A new Uint8Array containing the decoded data
|
||||
*/
|
||||
fromHex(hex: string): Uint8Array;
|
||||
fromHex(hex: string): Uint8Array<ArrayBuffer>;
|
||||
}
|
||||
|
||||
interface BroadcastChannel extends Bun.__internal.LibEmptyOrBroadcastChannel {}
|
||||
|
||||
5
packages/bun-types/overrides.d.ts
vendored
5
packages/bun-types/overrides.d.ts
vendored
@@ -6,14 +6,17 @@ declare module "stream/web" {
|
||||
* Consume a ReadableStream as text
|
||||
*/
|
||||
text(): Promise<string>;
|
||||
|
||||
/**
|
||||
* Consume a ReadableStream as a Uint8Array
|
||||
*/
|
||||
bytes(): Promise<Uint8Array>;
|
||||
bytes(): Promise<Uint8Array<ArrayBuffer>>;
|
||||
|
||||
/**
|
||||
* Consume a ReadableStream as JSON
|
||||
*/
|
||||
json(): Promise<any>;
|
||||
|
||||
/**
|
||||
* Consume a ReadableStream as a Blob
|
||||
*/
|
||||
|
||||
@@ -10,10 +10,11 @@
|
||||
},
|
||||
"files": [
|
||||
"./*.d.ts",
|
||||
"docs/**/*.md",
|
||||
"docs/*.md",
|
||||
"CLAUDE.md",
|
||||
"README.md"
|
||||
"./vendor/**/*.d.ts",
|
||||
"./docs/**/*.md",
|
||||
"./docs/*.md",
|
||||
"./CLAUDE.md",
|
||||
"./README.md"
|
||||
],
|
||||
"homepage": "https://bun.com",
|
||||
"dependencies": {
|
||||
@@ -23,8 +24,7 @@
|
||||
"@types/react": "^19"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19",
|
||||
"typescript": "^5.0.2"
|
||||
"@types/react": "^19"
|
||||
},
|
||||
"scripts": {
|
||||
"prebuild": "echo $(pwd)",
|
||||
|
||||
4
packages/bun-types/s3.d.ts
vendored
4
packages/bun-types/s3.d.ts
vendored
@@ -487,8 +487,8 @@ declare module "bun" {
|
||||
* // Process text chunk by chunk
|
||||
* }
|
||||
*/
|
||||
readonly readable: ReadableStream;
|
||||
stream(): ReadableStream;
|
||||
readonly readable: ReadableStream<Uint8Array<ArrayBuffer>>;
|
||||
stream(): ReadableStream<Uint8Array<ArrayBuffer>>;
|
||||
|
||||
/**
|
||||
* The name or path of the file in the bucket.
|
||||
|
||||
6
packages/bun-types/shell.d.ts
vendored
6
packages/bun-types/shell.d.ts
vendored
@@ -1,6 +1,4 @@
|
||||
declare module "bun" {
|
||||
type ShellFunction = (input: Uint8Array) => Uint8Array;
|
||||
|
||||
type ShellExpression =
|
||||
| { toString(): string }
|
||||
| Array<ShellExpression>
|
||||
@@ -294,7 +292,7 @@ declare module "bun" {
|
||||
* console.log(output.bytes()); // Uint8Array { byteLength: 6 }
|
||||
* ```
|
||||
*/
|
||||
bytes(): Uint8Array;
|
||||
bytes(): Uint8Array<ArrayBuffer>;
|
||||
}
|
||||
|
||||
interface ShellOutput {
|
||||
@@ -361,7 +359,7 @@ declare module "bun" {
|
||||
* console.log(output.bytes()); // Uint8Array { byteLength: 6 }
|
||||
* ```
|
||||
*/
|
||||
bytes(): Uint8Array;
|
||||
bytes(): Uint8Array<ArrayBuffer>;
|
||||
|
||||
/**
|
||||
* Read from stdout as a Blob
|
||||
|
||||
25
packages/bun-types/sqlite.d.ts
vendored
25
packages/bun-types/sqlite.d.ts
vendored
@@ -383,19 +383,28 @@ declare module "bun:sqlite" {
|
||||
* ]);
|
||||
* ```
|
||||
*/
|
||||
transaction(insideTransaction: (...args: any) => void): CallableFunction & {
|
||||
transaction<A extends any[], T>(
|
||||
insideTransaction: (...args: A) => T,
|
||||
): {
|
||||
/**
|
||||
* uses "BEGIN DEFERRED"
|
||||
* Execute the transaction
|
||||
*/
|
||||
deferred: (...args: any) => void;
|
||||
(...args: A): T;
|
||||
|
||||
/**
|
||||
* uses "BEGIN IMMEDIATE"
|
||||
* Execute the transaction using "BEGIN DEFERRED"
|
||||
*/
|
||||
immediate: (...args: any) => void;
|
||||
deferred: (...args: A) => T;
|
||||
|
||||
/**
|
||||
* uses "BEGIN EXCLUSIVE"
|
||||
* Execute the transaction using "BEGIN IMMEDIATE"
|
||||
*/
|
||||
exclusive: (...args: any) => void;
|
||||
immediate: (...args: A) => T;
|
||||
|
||||
/**
|
||||
* Execute the transaction using "BEGIN EXCLUSIVE"
|
||||
*/
|
||||
exclusive: (...args: A) => T;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -664,7 +673,7 @@ declare module "bun:sqlite" {
|
||||
[Symbol.iterator](): IterableIterator<ReturnType>;
|
||||
|
||||
/**
|
||||
* Execute the prepared statement. This returns `undefined`.
|
||||
* Execute the prepared statement.
|
||||
*
|
||||
* @param params optional values to bind to the statement. If omitted, the statement is run with the last bound values or no parameters if there are none.
|
||||
*
|
||||
|
||||
1
packages/bun-types/test-globals.d.ts
vendored
1
packages/bun-types/test-globals.d.ts
vendored
@@ -10,6 +10,7 @@ declare var test: typeof import("bun:test").test;
|
||||
declare var it: typeof import("bun:test").it;
|
||||
declare var describe: typeof import("bun:test").describe;
|
||||
declare var expect: typeof import("bun:test").expect;
|
||||
declare var expectTypeOf: typeof import("bun:test").expectTypeOf;
|
||||
declare var beforeAll: typeof import("bun:test").beforeAll;
|
||||
declare var beforeEach: typeof import("bun:test").beforeEach;
|
||||
declare var afterEach: typeof import("bun:test").afterEach;
|
||||
|
||||
27
packages/bun-types/test.d.ts
vendored
27
packages/bun-types/test.d.ts
vendored
@@ -56,6 +56,11 @@ declare module "bun:test" {
|
||||
* Restore the previous value of mocks.
|
||||
*/
|
||||
restore(): void;
|
||||
|
||||
/**
|
||||
* Reset all mock function state (calls, results, etc.) without restoring their original implementation.
|
||||
*/
|
||||
clearAllMocks(): void;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -1642,6 +1647,26 @@ declare module "bun:test" {
|
||||
*/
|
||||
toHaveReturnedTimes(times: number): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function has returned a specific value.
|
||||
* This matcher uses deep equality, like toEqual(), and supports asymmetric matchers.
|
||||
*/
|
||||
toHaveReturnedWith(expected: unknown): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function has returned a specific value on its last invocation.
|
||||
* This matcher uses deep equality, like toEqual(), and supports asymmetric matchers.
|
||||
*/
|
||||
toHaveLastReturnedWith(expected: unknown): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function has returned a specific value on the nth invocation.
|
||||
* This matcher uses deep equality, like toEqual(), and supports asymmetric matchers.
|
||||
* @param n The 1-based index of the function call
|
||||
* @param expected The expected return value
|
||||
*/
|
||||
toHaveNthReturnedWith(n: number, expected: unknown): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function is called.
|
||||
*/
|
||||
@@ -2182,4 +2207,6 @@ declare module "bun:test" {
|
||||
|
||||
export type UnknownFunction = (...args: unknown[]) => unknown;
|
||||
}
|
||||
|
||||
export const expectTypeOf: typeof import("./vendor/expect-type").expectTypeOf;
|
||||
}
|
||||
|
||||
283
packages/bun-types/vendor/expect-type/branding.d.ts
vendored
Normal file
283
packages/bun-types/vendor/expect-type/branding.d.ts
vendored
Normal file
@@ -0,0 +1,283 @@
|
||||
/*
|
||||
Copyright 2024 Misha Kaletsky
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
*/
|
||||
|
||||
import type { ConstructorOverloadParameters, NumOverloads, OverloadsInfoUnion } from "./overloads";
|
||||
import type {
|
||||
IsAny,
|
||||
IsNever,
|
||||
IsUnknown,
|
||||
MutuallyExtends,
|
||||
OptionalKeys,
|
||||
ReadonlyKeys,
|
||||
RequiredKeys,
|
||||
UnionToTuple,
|
||||
} from "./utils";
|
||||
/**
|
||||
* Represents a deeply branded type.
|
||||
*
|
||||
* Recursively walk a type and replace it with a branded type related to the
|
||||
* original. This is useful for equality-checking stricter than
|
||||
* `A extends B ? B extends A ? true : false : false`, because it detects the
|
||||
* difference between a few edge-case types that vanilla TypeScript
|
||||
* doesn't by default:
|
||||
* - `any` vs `unknown`
|
||||
* - `{ readonly a: string }` vs `{ a: string }`
|
||||
* - `{ a?: string }` vs `{ a: string | undefined }`
|
||||
*
|
||||
* __Note__: not very performant for complex types - this should only be used
|
||||
* when you know you need it. If doing an equality check, it's almost always
|
||||
* better to use {@linkcode StrictEqualUsingTSInternalIdenticalToOperator}.
|
||||
*/
|
||||
export type DeepBrand<T> =
|
||||
IsNever<T> extends true
|
||||
? {
|
||||
type: "never";
|
||||
}
|
||||
: IsAny<T> extends true
|
||||
? {
|
||||
type: "any";
|
||||
}
|
||||
: IsUnknown<T> extends true
|
||||
? {
|
||||
type: "unknown";
|
||||
}
|
||||
: T extends string | number | boolean | symbol | bigint | null | undefined | void
|
||||
? {
|
||||
type: "primitive";
|
||||
value: T;
|
||||
}
|
||||
: T extends new (...args: any[]) => any
|
||||
? {
|
||||
type: "constructor";
|
||||
params: ConstructorOverloadParameters<T>;
|
||||
instance: DeepBrand<InstanceType<Extract<T, new (...args: any) => any>>>;
|
||||
}
|
||||
: T extends (...args: infer P) => infer R
|
||||
? NumOverloads<T> extends 1
|
||||
? {
|
||||
type: "function";
|
||||
params: DeepBrand<P>;
|
||||
return: DeepBrand<R>;
|
||||
this: DeepBrand<ThisParameterType<T>>;
|
||||
props: DeepBrand<Omit<T, keyof Function>>;
|
||||
}
|
||||
: UnionToTuple<OverloadsInfoUnion<T>> extends infer OverloadsTuple
|
||||
? {
|
||||
type: "overloads";
|
||||
overloads: {
|
||||
[K in keyof OverloadsTuple]: DeepBrand<OverloadsTuple[K]>;
|
||||
};
|
||||
}
|
||||
: never
|
||||
: T extends any[]
|
||||
? {
|
||||
type: "array";
|
||||
items: {
|
||||
[K in keyof T]: T[K];
|
||||
};
|
||||
}
|
||||
: {
|
||||
type: "object";
|
||||
properties: {
|
||||
[K in keyof T]: DeepBrand<T[K]>;
|
||||
};
|
||||
readonly: ReadonlyKeys<T>;
|
||||
required: RequiredKeys<T>;
|
||||
optional: OptionalKeys<T>;
|
||||
constructorParams: DeepBrand<ConstructorOverloadParameters<T>>;
|
||||
};
|
||||
/**
|
||||
* Checks if two types are strictly equal using branding.
|
||||
*/
|
||||
export type StrictEqualUsingBranding<Left, Right> = MutuallyExtends<DeepBrand<Left>, DeepBrand<Right>>;
|
||||
1207
packages/bun-types/vendor/expect-type/index.d.ts
vendored
Normal file
1207
packages/bun-types/vendor/expect-type/index.d.ts
vendored
Normal file
File diff suppressed because it is too large
Load Diff
395
packages/bun-types/vendor/expect-type/messages.d.ts
vendored
Normal file
395
packages/bun-types/vendor/expect-type/messages.d.ts
vendored
Normal file
@@ -0,0 +1,395 @@
|
||||
/*
|
||||
Copyright 2024 Misha Kaletsky
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
*/
|
||||
|
||||
import type { StrictEqualUsingBranding } from "./branding";
|
||||
import type { And, Extends, ExtendsExcludingAnyOrNever, IsAny, IsNever, IsUnknown, Not, UsefulKeys } from "./utils";
|
||||
/**
|
||||
* Determines the printable type representation for a given type.
|
||||
*/
|
||||
export type PrintType<T> =
|
||||
IsUnknown<T> extends true
|
||||
? "unknown"
|
||||
: IsNever<T> extends true
|
||||
? "never"
|
||||
: IsAny<T> extends true
|
||||
? never
|
||||
: boolean extends T
|
||||
? "boolean"
|
||||
: T extends boolean
|
||||
? `literal boolean: ${T}`
|
||||
: string extends T
|
||||
? "string"
|
||||
: T extends string
|
||||
? `literal string: ${T}`
|
||||
: number extends T
|
||||
? "number"
|
||||
: T extends number
|
||||
? `literal number: ${T}`
|
||||
: bigint extends T
|
||||
? "bigint"
|
||||
: T extends bigint
|
||||
? `literal bigint: ${T}`
|
||||
: T extends null
|
||||
? "null"
|
||||
: T extends undefined
|
||||
? "undefined"
|
||||
: T extends (...args: any[]) => any
|
||||
? "function"
|
||||
: "...";
|
||||
/**
|
||||
* Helper for showing end-user a hint why their type assertion is failing.
|
||||
* This swaps "leaf" types with a literal message about what the actual and
|
||||
* expected types are. Needs to check for `Not<IsAny<Actual>>` because
|
||||
* otherwise `LeafTypeOf<Actual>` returns `never`, which extends everything 🤔
|
||||
*/
|
||||
export type MismatchInfo<Actual, Expected> =
|
||||
And<[Extends<PrintType<Actual>, "...">, Not<IsAny<Actual>>]> extends true
|
||||
? And<[Extends<any[], Actual>, Extends<any[], Expected>]> extends true
|
||||
? Array<MismatchInfo<Extract<Actual, any[]>[number], Extract<Expected, any[]>[number]>>
|
||||
: {
|
||||
[K in UsefulKeys<Actual> | UsefulKeys<Expected>]: MismatchInfo<
|
||||
K extends keyof Actual ? Actual[K] : never,
|
||||
K extends keyof Expected ? Expected[K] : never
|
||||
>;
|
||||
}
|
||||
: StrictEqualUsingBranding<Actual, Expected> extends true
|
||||
? Actual
|
||||
: `Expected: ${PrintType<Expected>}, Actual: ${PrintType<Exclude<Actual, Expected>>}`;
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const inverted: unique symbol;
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
type Inverted<T> = {
|
||||
[inverted]: T;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectNull: unique symbol;
|
||||
export type ExpectNull<T> = {
|
||||
[expectNull]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, null>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectUndefined: unique symbol;
|
||||
export type ExpectUndefined<T> = {
|
||||
[expectUndefined]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, undefined>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectNumber: unique symbol;
|
||||
export type ExpectNumber<T> = {
|
||||
[expectNumber]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, number>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectString: unique symbol;
|
||||
export type ExpectString<T> = {
|
||||
[expectString]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, string>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectBoolean: unique symbol;
|
||||
export type ExpectBoolean<T> = {
|
||||
[expectBoolean]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, boolean>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectVoid: unique symbol;
|
||||
export type ExpectVoid<T> = {
|
||||
[expectVoid]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, void>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectFunction: unique symbol;
|
||||
export type ExpectFunction<T> = {
|
||||
[expectFunction]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, (...args: any[]) => any>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectObject: unique symbol;
|
||||
export type ExpectObject<T> = {
|
||||
[expectObject]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, object>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectArray: unique symbol;
|
||||
export type ExpectArray<T> = {
|
||||
[expectArray]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, any[]>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectSymbol: unique symbol;
|
||||
export type ExpectSymbol<T> = {
|
||||
[expectSymbol]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, symbol>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectAny: unique symbol;
|
||||
export type ExpectAny<T> = {
|
||||
[expectAny]: T;
|
||||
result: IsAny<T>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectUnknown: unique symbol;
|
||||
export type ExpectUnknown<T> = {
|
||||
[expectUnknown]: T;
|
||||
result: IsUnknown<T>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectNever: unique symbol;
|
||||
export type ExpectNever<T> = {
|
||||
[expectNever]: T;
|
||||
result: IsNever<T>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectNullable: unique symbol;
|
||||
export type ExpectNullable<T> = {
|
||||
[expectNullable]: T;
|
||||
result: Not<StrictEqualUsingBranding<T, NonNullable<T>>>;
|
||||
};
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
declare const expectBigInt: unique symbol;
|
||||
export type ExpectBigInt<T> = {
|
||||
[expectBigInt]: T;
|
||||
result: ExtendsExcludingAnyOrNever<T, bigint>;
|
||||
};
|
||||
/**
|
||||
* Checks if the result of an expecter matches the specified options, and
|
||||
* resolves to a fairly readable error message if not.
|
||||
*/
|
||||
export type Scolder<
|
||||
Expecter extends {
|
||||
result: boolean;
|
||||
},
|
||||
Options extends {
|
||||
positive: boolean;
|
||||
},
|
||||
> = Expecter["result"] extends Options["positive"]
|
||||
? () => true
|
||||
: Options["positive"] extends true
|
||||
? Expecter
|
||||
: Inverted<Expecter>;
|
||||
export {};
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user