mirror of https://github.com/oven-sh/bun
synced 2026-02-22 00:32:02 +00:00
Compare commits
3 Commits: mock-dot-c ... claude/add

| Author | SHA1 | Date |
|---|---|---|
| | f299efe096 | |
| | 899e327047 | |
| | 9444d24104 | |
78 .agent/agent.mjs (Normal file)
@@ -0,0 +1,78 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }

  process.exit(status || 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
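One behavioral detail worth flagging in this new helper: the first positional is upper-cased before the prompt path is built, so the file actually opened is `.agent/TRIAGE.md`, even though the script's own error message suggests names like `.agent/triage.md`; on a case-sensitive filesystem the two differ. A hypothetical local invocation (assuming a matching prompt file exists and the `claude` CLI is on `PATH`):

```sh
# Non-interactive run: --print is prepended, extra args are appended to the prompt.
node .agent/agent.mjs triage fix bug in authentication

# Interactive run: -i skips --print, leaving the claude session open.
node .agent/agent.mjs -i triage
```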
@@ -569,7 +569,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
timeout_in_minutes: profile === "asan" ? 45 : 30,
command:
os === "windows"
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
47 .github/pull_request_template.md (vendored)
@@ -1,3 +1,50 @@
### What does this PR do?

<!-- **Please explain what your changes do**, example: -->

<!--

This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.

-->

- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes

### How did you verify your code works?

<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->

<!-- I wrote automated tests -->

<!-- If JavaScript/TypeScript modules or builtins changed:

- [ ] I included a test for the new code, or existing tests cover it
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)

-->

<!-- If Zig files changed:

- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->

<!-- If new methods, getters, or setters were added to a publicly exposed class:

- [ ] I added TypeScript types for the new methods, getters, or setters
-->

<!-- If dependencies in tests changed:

- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->

<!-- If a new builtin ESM/CJS module was added:

- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->
6 .github/workflows/format.yml (vendored)
@@ -46,8 +46,9 @@ jobs:
version: 0.14.0
- name: Zig Format
run: |
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
zig fmt src
./scripts/sort-imports.ts src
bun scripts/sortImports src
zig fmt src
- name: Prettier Format
run: |
@@ -55,7 +56,4 @@ jobs:
- name: Clang Format
run: |
bun run clang-format
- name: Ban Words
run: |
bun ./test/internal/ban-words.test.ts
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
15 .github/workflows/update-hdrhistogram.yml (vendored)
@@ -55,13 +55,10 @@ jobs:
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi

# Try to get commit SHA from tag object (for annotated tags)
# If it fails, assume it's a lightweight tag pointing directly to commit
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
if [ -z "$LATEST_SHA" ]; then
# Lightweight tag - SHA points directly to commit
LATEST_SHA="$LATEST_TAG_SHA"
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi

if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
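Some context for the tag-resolution hunks that recur across these four dependency-update workflows: `git/refs/tags/<tag>` resolves either directly to a commit (a lightweight tag) or to an intermediate tag object (an annotated tag) that must be dereferenced once more through the `git/tags` endpoint. A minimal standalone sketch of that two-step lookup, using hypothetical `$OWNER`, `$REPO`, and `$TAG` variables:

```sh
# Sketch only: resolve a tag name to its commit SHA via the GitHub REST API.
REF=$(curl -sL "https://api.github.com/repos/$OWNER/$REPO/git/refs/tags/$TAG")
SHA=$(echo "$REF" | jq -r '.object.sha')
TYPE=$(echo "$REF" | jq -r '.object.type')

if [ "$TYPE" = "tag" ]; then
  # Annotated tag: the ref points at a tag object, which in turn points at the commit.
  SHA=$(curl -sL "https://api.github.com/repos/$OWNER/$REPO/git/tags/$SHA" | jq -r '.object.sha')
fi
# For a lightweight tag ($TYPE = "commit"), $SHA is already the commit SHA.
```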
@@ -91,7 +88,7 @@ jobs:
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-hdrhistogram-${{ github.run_number }}
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?

@@ -99,4 +96,4 @@ jobs:

Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
33 .github/workflows/update-highway.yml (vendored)
@@ -50,33 +50,14 @@ jobs:
exit 1
fi

TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
if [ -z "$TAG_REF" ]; then
echo "Error: Could not fetch tag reference for $LATEST_TAG"
exit 1
fi

TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi

# Handle both lightweight tags (type: commit) and annotated tags (type: tag)
if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
# Lightweight tag - object.sha is already the commit SHA
LATEST_SHA="$TAG_OBJECT_SHA"
elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
# Annotated tag - need to fetch the tag object to get the commit SHA
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
exit 1
fi
else
echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi

@@ -107,7 +88,7 @@ jobs:
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-highway-${{ github.run_number }}
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?

@@ -115,4 +96,4 @@ jobs:

Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
22 .github/workflows/update-lolhtml.yml (vendored)
@@ -50,27 +50,15 @@ jobs:
exit 1
fi

# Get the commit SHA that the tag points to
# This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')

LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi

if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
# This is an annotated tag, we need to get the commit it points to
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
else
# This is a lightweight tag pointing directly to a commit
LATEST_SHA="$LATEST_TAG_SHA"
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi

if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
27 .github/workflows/update-lshpack.yml (vendored)
@@ -50,32 +50,15 @@ jobs:
exit 1
fi

# Get the tag reference, which contains both SHA and type
TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
if [ -z "$TAG_REF" ]; then
echo "Error: Could not fetch tag reference for $LATEST_TAG"
exit 1
fi

LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi

# If it's an annotated tag, we need to dereference it to get the commit SHA
# If it's a lightweight tag, the SHA already points to the commit
if [ "$TAG_TYPE" = "tag" ]; then
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
exit 1
fi
else
# For lightweight tags, the SHA is already the commit SHA
LATEST_SHA="$LATEST_TAG_SHA"
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi

if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
47 .github/workflows/vscode-release.yml (vendored)
@@ -1,47 +0,0 @@
name: VSCode Extension Publish
on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
        required: true
        type: string

jobs:
  publish:
    name: "Publish to VS Code Marketplace"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.2.18"

      - name: Install dependencies (root)
        run: bun install

      - name: Install dependencies
        run: bun install
        working-directory: packages/bun-vscode

      - name: Set Version
        run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
        working-directory: packages/bun-vscode

      - name: Build (inspector protocol)
        run: bun install && bun run build
        working-directory: packages/bun-inspector-protocol

      - name: Build (vscode extension)
        run: bun run build
        working-directory: packages/bun-vscode

      - name: Publish
        if: success()
        run: bunx vsce publish
        env:
          VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
        working-directory: packages/bun-vscode/extension
2 .vscode/settings.json (vendored)
@@ -168,5 +168,5 @@
    "WebKit/WebInspectorUI": true,
  },
  "git.detectSubmodules": false,
  "bun.test.customScript": "./build/debug/bun-debug test"
  "bun.test.customScript": "bun-debug test"
}
@@ -4,9 +4,9 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed

### Build Commands

- **Build debug version**: `bun bd`
- **Build debug version**: `bun bd` or `bun run build:debug`
  - Creates a debug build at `./build/debug/bun-debug`
  - Compilation takes ~5 minutes. Don't timeout, be patient.
  - Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
  - **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
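As a concrete illustration of the loop those bullets describe (the test file name below is a placeholder):

```sh
# Build the debug binary; output lands at ./build/debug/bun-debug.
bun bd

# Run a single test file against the debug build -- never plain `bun test`,
# which would use an installed bun without your local changes.
bun bd test my-feature.test.ts
```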
@@ -160,7 +160,6 @@ In particular, these are:

- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
@@ -752,13 +752,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
        });
    }
}
{
    const cppImport = b.createModule(.{
        .root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "cpp.zig"),
    });
    mod.addImport("cpp", cppImport);
    cppImport.addImport("bun", mod);
}
inline for (.{
    .{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
    .{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
213 bun.lock
@@ -4,9 +4,6 @@
"": {
"name": "bun",
"devDependencies": {
"@lezer/common": "^1.2.3",
"@lezer/cpp": "^1.1.3",
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
"esbuild": "^0.21.4",
"mitata": "^0.1.11",
"peechy": "0.4.34",
@@ -90,191 +87,41 @@

"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],

"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],

"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],

"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],

"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],

"@octokit/app": ["@octokit/app@14.1.0", "", { "dependencies": { "@octokit/auth-app": "^6.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-rest": "^9.0.0", "@octokit/types": "^12.0.0", "@octokit/webhooks": "^12.0.4" } }, "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw=="],

"@octokit/auth-app": ["@octokit/auth-app@6.1.4", "", { "dependencies": { "@octokit/auth-oauth-app": "^7.1.0", "@octokit/auth-oauth-user": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/request-error": "^5.1.0", "@octokit/types": "^13.1.0", "deprecation": "^2.3.1", "lru-cache": "npm:@wolfy1339/lru-cache@^11.0.2-patch.1", "universal-github-app-jwt": "^1.1.2", "universal-user-agent": "^6.0.0" } }, "sha512-QkXkSOHZK4dA5oUqY5Dk3S+5pN2s1igPjEASNQV8/vgJgW034fQWR16u7VsNOK/EljA00eyjYF5mWNxWKWhHRQ=="],

"@octokit/auth-oauth-app": ["@octokit/auth-oauth-app@7.1.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^6.1.0", "@octokit/auth-oauth-user": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "@types/btoa-lite": "^1.0.0", "btoa-lite": "^1.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA=="],

"@octokit/auth-oauth-device": ["@octokit/auth-oauth-device@6.1.0", "", { "dependencies": { "@octokit/oauth-methods": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw=="],

"@octokit/auth-oauth-user": ["@octokit/auth-oauth-user@4.1.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^6.1.0", "@octokit/oauth-methods": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "btoa-lite": "^1.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ=="],

"@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="],

"@octokit/auth-unauthenticated": ["@octokit/auth-unauthenticated@5.0.1", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/types": "^12.0.0" } }, "sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg=="],

"@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="],

"@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="],

"@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="],

"@octokit/oauth-app": ["@octokit/oauth-app@6.1.0", "", { "dependencies": { "@octokit/auth-oauth-app": "^7.0.0", "@octokit/auth-oauth-user": "^4.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-authorization-url": "^6.0.2", "@octokit/oauth-methods": "^4.0.0", "@types/aws-lambda": "^8.10.83", "universal-user-agent": "^6.0.0" } }, "sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g=="],

"@octokit/oauth-authorization-url": ["@octokit/oauth-authorization-url@6.0.2", "", {}, "sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA=="],

"@octokit/oauth-methods": ["@octokit/oauth-methods@4.1.0", "", { "dependencies": { "@octokit/oauth-authorization-url": "^6.0.2", "@octokit/request": "^8.3.1", "@octokit/request-error": "^5.1.0", "@octokit/types": "^13.0.0", "btoa-lite": "^1.0.0" } }, "sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw=="],

"@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],

"@octokit/plugin-paginate-graphql": ["@octokit/plugin-paginate-graphql@4.0.1", "", { "peerDependencies": { "@octokit/core": ">=5" } }, "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A=="],

"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@11.4.4-cjs.2", "", { "dependencies": { "@octokit/types": "^13.7.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw=="],

"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1", "", { "dependencies": { "@octokit/types": "^13.8.0" }, "peerDependencies": { "@octokit/core": "^5" } }, "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ=="],

"@octokit/plugin-retry": ["@octokit/plugin-retry@6.1.0", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/types": "^13.0.0", "bottleneck": "^2.15.3" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-WrO3bvq4E1Xh1r2mT9w6SDFg01gFmP81nIG77+p/MqW1JeXXgL++6umim3t6x0Zj5pZm3rXAN+0HEjmmdhIRig=="],

"@octokit/plugin-throttling": ["@octokit/plugin-throttling@8.2.0", "", { "dependencies": { "@octokit/types": "^12.2.0", "bottleneck": "^2.15.3" }, "peerDependencies": { "@octokit/core": "^5.0.0" } }, "sha512-nOpWtLayKFpgqmgD0y3GqXafMFuKcA4tRPZIfu7BArd2lEZeb1988nhWhwx4aZWmjDmUfdgVf7W+Tt4AmvRmMQ=="],

"@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="],

"@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="],

"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],

"@octokit/webhooks": ["@octokit/webhooks@12.3.2", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/webhooks-methods": "^4.1.0", "@octokit/webhooks-types": "7.6.1", "aggregate-error": "^3.1.0" } }, "sha512-exj1MzVXoP7xnAcAB3jZ97pTvVPkQF9y6GA/dvYC47HV7vLv+24XRS6b/v/XnyikpEuvMhugEXdGtAlU086WkQ=="],

"@octokit/webhooks-methods": ["@octokit/webhooks-methods@5.1.1", "", {}, "sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg=="],

"@octokit/webhooks-types": ["@octokit/webhooks-types@7.6.1", "", {}, "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw=="],

"@sentry/types": ["@sentry/types@7.120.3", "", {}, "sha512-C4z+3kGWNFJ303FC+FxAd4KkHvxpNFYAFN8iMIgBwJdpIl25KZ8Q/VdGn0MLLUEHNLvjob0+wvwlcRBBNLXOow=="],

"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],

"@types/btoa-lite": ["@types/btoa-lite@1.0.2", "", {}, "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg=="],

"@types/bun": ["@types/bun@workspace:packages/@types/bun"],

"@types/jsonwebtoken": ["@types/jsonwebtoken@9.0.10", "", { "dependencies": { "@types/ms": "*", "@types/node": "*" } }, "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA=="],

"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],

"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
"@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],

"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],

"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],

"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],

"bottleneck": ["bottleneck@2.19.5", "", {}, "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw=="],

"btoa-lite": ["btoa-lite@1.0.0", "", {}, "sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA=="],

"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],

"bun-tracestrings": ["bun-tracestrings@github:oven-sh/bun.report#912ca63", { "dependencies": { "@octokit/webhooks-methods": "^5.1.0", "@sentry/types": "^7.112.2", "@types/bun": "^1.2.6", "html-minifier": "^4.0.0", "lightningcss": "^1.24.1", "marked": "^12.0.1", "octokit": "^3.2.0", "prettier": "^3.2.5", "typescript": "^5.0.0" }, "bin": { "ci-remap-server": "./bin/ci-remap-server.ts" } }, "oven-sh-bun.report-912ca63"],

"bun-types": ["bun-types@workspace:packages/bun-types"],

"camel-case": ["camel-case@3.0.0", "", { "dependencies": { "no-case": "^2.2.0", "upper-case": "^1.1.1" } }, "sha512-+MbKztAYHXPr1jNTSKQF52VpcFjwY5RkR7fxksV8Doo4KAYc5Fl4UJRgthBbTmEx8C54DqahhbLJkDwjI3PI/w=="],
"camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],

"capital-case": ["capital-case@1.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A=="],

"change-case": ["change-case@4.1.2", "", { "dependencies": { "camel-case": "^4.1.2", "capital-case": "^1.0.4", "constant-case": "^3.0.4", "dot-case": "^3.0.4", "header-case": "^2.0.4", "no-case": "^3.0.4", "param-case": "^3.0.4", "pascal-case": "^3.1.2", "path-case": "^3.0.4", "sentence-case": "^3.0.4", "snake-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A=="],

"clean-css": ["clean-css@4.2.4", "", { "dependencies": { "source-map": "~0.6.0" } }, "sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A=="],

"clean-stack": ["clean-stack@2.2.0", "", {}, "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A=="],

"commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],

"constant-case": ["constant-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case": "^2.0.2" } }, "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ=="],

"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],

"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],

"detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="],

"dot-case": ["dot-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w=="],

"ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="],

"esbuild": ["esbuild@0.21.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.21.5", "@esbuild/android-arm": "0.21.5", "@esbuild/android-arm64": "0.21.5", "@esbuild/android-x64": "0.21.5", "@esbuild/darwin-arm64": "0.21.5", "@esbuild/darwin-x64": "0.21.5", "@esbuild/freebsd-arm64": "0.21.5", "@esbuild/freebsd-x64": "0.21.5", "@esbuild/linux-arm": "0.21.5", "@esbuild/linux-arm64": "0.21.5", "@esbuild/linux-ia32": "0.21.5", "@esbuild/linux-loong64": "0.21.5", "@esbuild/linux-mips64el": "0.21.5", "@esbuild/linux-ppc64": "0.21.5", "@esbuild/linux-riscv64": "0.21.5", "@esbuild/linux-s390x": "0.21.5", "@esbuild/linux-x64": "0.21.5", "@esbuild/netbsd-x64": "0.21.5", "@esbuild/openbsd-x64": "0.21.5", "@esbuild/sunos-x64": "0.21.5", "@esbuild/win32-arm64": "0.21.5", "@esbuild/win32-ia32": "0.21.5", "@esbuild/win32-x64": "0.21.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw=="],

"he": ["he@1.2.0", "", { "bin": { "he": "bin/he" } }, "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw=="],

"header-case": ["header-case@2.0.4", "", { "dependencies": { "capital-case": "^1.0.4", "tslib": "^2.0.3" } }, "sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q=="],

"html-minifier": ["html-minifier@4.0.0", "", { "dependencies": { "camel-case": "^3.0.0", "clean-css": "^4.2.1", "commander": "^2.19.0", "he": "^1.2.0", "param-case": "^2.1.1", "relateurl": "^0.2.7", "uglify-js": "^3.5.1" }, "bin": { "html-minifier": "./cli.js" } }, "sha512-aoGxanpFPLg7MkIl/DDFYtb0iWz7jMFGqFhvEDZga6/4QTjneiD8I/NXL1x5aaoCp7FSIT6h/OhykDdPsbtMig=="],

"indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="],

"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],

"jsonwebtoken": ["jsonwebtoken@9.0.2", "", { "dependencies": { "jws": "^3.2.2", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", "lodash.isnumber": "^3.0.3", "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", "ms": "^2.1.1", "semver": "^7.5.4" } }, "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ=="],

"jwa": ["jwa@1.4.2", "", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw=="],

"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],

"lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="],

"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="],

"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="],

"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="],

"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="],

"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="],

"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="],

"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="],

"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="],

"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="],

"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="],

"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],

"lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],

"lodash.isinteger": ["lodash.isinteger@4.0.4", "", {}, "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="],

"lodash.isnumber": ["lodash.isnumber@3.0.3", "", {}, "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="],

"lodash.isplainobject": ["lodash.isplainobject@4.0.6", "", {}, "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="],

"lodash.isstring": ["lodash.isstring@4.0.1", "", {}, "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="],

"lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],

"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],

"lower-case": ["lower-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg=="],

"lru-cache": ["@wolfy1339/lru-cache@11.0.2-patch.1", "", {}, "sha512-BgYZfL2ADCXKOw2wJtkM3slhHotawWkgIRRxq4wEybnZQPjvAp71SPX35xepMykTw8gXlzWcWPTY31hlbnRsDA=="],

"marked": ["marked@12.0.2", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-qXUm7e/YKFoqFPYPa3Ukg9xlI5cyAtGmyEIzMfW//m6kXwCy2Ps9DYf5ioijFKQ8qyuscrHoY04iJGctu2Kg0Q=="],

"mitata": ["mitata@0.1.14", "", {}, "sha512-8kRs0l636eT4jj68PFXOR2D5xl4m56T478g16SzUPOYgkzQU+xaw62guAQxzBPm+SXb15GQi1cCpDxJfkr4CSA=="],

"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],

"no-case": ["no-case@3.0.4", "", { "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg=="],

"octokit": ["octokit@3.2.2", "", { "dependencies": { "@octokit/app": "^14.0.2", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-graphql": "^4.0.0", "@octokit/plugin-paginate-rest": "11.4.4-cjs.2", "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1", "@octokit/plugin-retry": "^6.0.0", "@octokit/plugin-throttling": "^8.0.0", "@octokit/request-error": "^5.0.0", "@octokit/types": "^13.0.0", "@octokit/webhooks": "^12.3.1" } }, "sha512-7Abo3nADdja8l/aglU6Y3lpnHSfv0tw7gFPiqzry/yCU+2gTAX7R1roJ8hJrxIK+S1j+7iqRJXtmuHJ/UDsBhQ=="],

"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],

"param-case": ["param-case@2.1.1", "", { "dependencies": { "no-case": "^2.2.0" } }, "sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w=="],
"param-case": ["param-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A=="],

"pascal-case": ["pascal-case@3.1.2", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g=="],

@@ -282,76 +129,30 @@

"peechy": ["peechy@0.4.34", "", { "dependencies": { "change-case": "^4.1.2" }, "bin": { "peechy": "cli.js" } }, "sha512-Cpke/cCqqZHhkyxz7mdqS8ZAGJFUi5icu3ZGqxm9GC7g2VrhH0tmjPhZoWHAN5ghw1m1wq5+2YvfbDSqgC4+Zg=="],

"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
"prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="],

"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.2.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-5aWRdCgv645xaa58X8lOxzZoiHAldAPChljr/MT0crXVOWTZ+Svl4hIWlz+niYSlO6ikE5UXkN1JrRvIP2ut0A=="],
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.1.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0" }, "optionalPeers": ["vue-tsc"] }, "sha512-5aWRdCgv645xaa58X8lOxzZoiHAldAPChljr/MT0crXVOWTZ+Svl4hIWlz+niYSlO6ikE5UXkN1JrRvIP2ut0A=="],

"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],

"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],

"relateurl": ["relateurl@0.2.7", "", {}, "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog=="],

"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],

"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],

"sentence-case": ["sentence-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg=="],

"snake-case": ["snake-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg=="],

"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],

"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],

"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],

"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],

"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],

"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],

"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],

"universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],

"upper-case": ["upper-case@1.1.3", "", {}, "sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA=="],
"upper-case": ["upper-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg=="],

"upper-case-first": ["upper-case-first@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg=="],

"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],

"@octokit/app/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="],

"@octokit/app/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],

"@octokit/auth-unauthenticated/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],

"@octokit/plugin-throttling/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],

"@octokit/webhooks/@octokit/webhooks-methods": ["@octokit/webhooks-methods@4.1.0", "", {}, "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ=="],

"camel-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],

"change-case/camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],

"change-case/param-case": ["param-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A=="],

"constant-case/upper-case": ["upper-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg=="],

"param-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],

"@octokit/app/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],

"@octokit/auth-unauthenticated/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],

"@octokit/plugin-throttling/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],

"camel-case/no-case/lower-case": ["lower-case@1.1.4", "", {}, "sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA=="],

"param-case/no-case/lower-case": ["lower-case@1.1.4", "", {}, "sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA=="],
}
}
@@ -1,3 +1,4 @@
src/bake/bake.bind.ts
src/bake/bake.d.ts
src/bake/bake.private.d.ts
src/bake/bun-framework-react/index.ts

@@ -1,4 +1,4 @@
src/bake.bind.ts
src/bake/bake.bind.ts
src/bake/DevServer.bind.ts
src/bun.js/api/BunObject.bind.ts
src/bun.js/bindgen_test.bind.ts

@@ -350,7 +350,6 @@ src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
src/bun.js/bindings/webcore/JSTransformStream.cpp
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
src/bun.js/bindings/webcore/JSWebSocket.cpp
src/bun.js/bindings/webcore/JSWorker.cpp
src/bun.js/bindings/webcore/JSWorkerOptions.cpp

@@ -8,7 +8,6 @@ src/codegen/bundle-functions.ts
src/codegen/bundle-modules.ts
src/codegen/class-definitions.ts
src/codegen/client-js.ts
src/codegen/cppbind.ts
src/codegen/create-hash-table.ts
src/codegen/generate-classes.ts
src/codegen/generate-compact-string-table.ts
@@ -18,4 +17,3 @@ src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/shared-types.ts

@@ -29,7 +29,6 @@ src/js/builtins/TransformStream.ts
src/js/builtins/TransformStreamDefaultController.ts
src/js/builtins/TransformStreamInternals.ts
src/js/builtins/UtilInspect.ts
src/js/builtins/WasmStreaming.ts
src/js/builtins/WritableStreamDefaultController.ts
src/js/builtins/WritableStreamDefaultWriter.ts
src/js/builtins/WritableStreamInternals.ts
@@ -1,17 +1,15 @@
src/allocators.zig
src/allocators/AllocationScope.zig
src/allocators/basic.zig
src/allocators/LinuxMemFdAllocator.zig
src/allocators/MaxHeapAllocator.zig
src/allocators/linux_memfd_allocator.zig
src/allocators/max_heap_allocator.zig
src/allocators/memory_allocator.zig
src/allocators/MemoryReportingAllocator.zig
src/allocators/mimalloc_arena.zig
src/allocators/mimalloc.zig
src/allocators/MimallocArena.zig
src/allocators/NullableAllocator.zig
src/analytics.zig
src/analytics/schema.zig
src/analytics/analytics_schema.zig
src/analytics/analytics_thread.zig
src/api/schema.zig
src/asan.zig
src/ast.zig
src/ast/Ast.zig
src/ast/ASTMemoryAllocator.zig
src/ast/B.zig
@@ -35,30 +33,20 @@ src/ast/UseDirective.zig
src/async/posix_event_loop.zig
src/async/stub_event_loop.zig
src/async/windows_event_loop.zig
src/bake.zig
src/baby_list.zig
src/bake/bake.zig
src/bake/DevServer.zig
src/bake/DevServer/Assets.zig
src/bake/DevServer/DirectoryWatchStore.zig
src/bake/DevServer/ErrorReportRequest.zig
src/bake/DevServer/HmrSocket.zig
src/bake/DevServer/HotReloadEvent.zig
src/bake/DevServer/IncrementalGraph.zig
src/bake/DevServer/memory_cost.zig
src/bake/DevServer/PackedMap.zig
src/bake/DevServer/RouteBundle.zig
src/bake/DevServer/SerializedFailure.zig
src/bake/DevServer/SourceMapStore.zig
src/bake/DevServer/WatcherAtomics.zig
src/bake/FrameworkRouter.zig
src/bake/production.zig
src/base64/base64.zig
src/bit_set.zig
src/bits.zig
src/boringssl.zig
src/brotli.zig
src/btjs.zig
src/bun.js.zig
src/bun_js.zig
src/bun.js/api.zig
src/bun.js/api/bun/dns.zig
src/bun.js/api/bun/dns_resolver.zig
src/bun.js/api/bun/h2_frame_parser.zig
src/bun.js/api/bun/lshpack.zig
src/bun.js/api/bun/process.zig
@@ -108,7 +96,6 @@ src/bun.js/api/Timer/EventLoopTimer.zig
src/bun.js/api/Timer/ImmediateObject.zig
src/bun.js/api/Timer/TimeoutObject.zig
src/bun.js/api/Timer/TimerObjectInternals.zig
src/bun.js/api/Timer/WTFTimer.zig
src/bun.js/api/TOMLObject.zig
src/bun.js/api/UnsafeObject.zig
src/bun.js/bindgen_test.zig
@@ -293,6 +280,7 @@ src/bun.js/webcore/streams.zig
src/bun.js/webcore/TextDecoder.zig
src/bun.js/webcore/TextEncoder.zig
src/bun.js/webcore/TextEncoderStreamEncoder.zig
src/bun.js/WTFTimer.zig
src/bun.zig
src/bundler/AstBuilder.zig
src/bundler/bundle_v2.zig
@@ -373,11 +361,6 @@ src/cli/update_interactive_command.zig
src/cli/upgrade_command.zig
src/cli/why_command.zig
src/codegen/process_windows_translate_c.zig
src/collections.zig
src/collections/baby_list.zig
src/collections/bit_set.zig
src/collections/hive_array.zig
src/collections/multi_array_list.zig
src/compile_target.zig
src/comptime_string_map.zig
src/copy_file.zig
@@ -526,6 +509,7 @@ src/env.zig
src/errno/darwin_errno.zig
src/errno/linux_errno.zig
src/errno/windows_errno.zig
src/exact_size_matcher.zig
src/fd.zig
src/feature_flags.zig
src/fmt.zig
@@ -537,8 +521,10 @@ src/glob.zig
src/glob/GlobWalker.zig
src/glob/match.zig
src/Global.zig
src/grapheme.zig
src/heap_breakdown.zig
src/highway.zig
src/hive_array.zig
src/hmac.zig
src/HTMLScanner.zig
src/http.zig
@@ -546,7 +532,6 @@ src/http/AsyncHTTP.zig
src/http/CertificateInfo.zig
src/http/Decompressor.zig
src/http/Encoding.zig
src/http/ETag.zig
src/http/FetchRedirect.zig
src/http/HeaderBuilder.zig
src/http/Headers.zig
@@ -634,11 +619,6 @@ src/install/resolvers/folder_resolver.zig
src/install/versioned_url.zig
src/install/windows-shim/BinLinkingShim.zig
src/install/windows-shim/bun_shim_impl.zig
src/install/yarn.zig
src/interchange.zig
src/interchange/json.zig
src/interchange/toml.zig
src/interchange/toml/lexer.zig
src/io/heap.zig
src/io/io.zig
src/io/MaxBuf.zig
@@ -647,12 +627,14 @@ src/io/PipeReader.zig
src/io/pipes.zig
src/io/PipeWriter.zig
src/io/source.zig
src/js_ast.zig
src/js_lexer_tables.zig
src/js_lexer.zig
src/js_lexer/identifier.zig
src/js_parser.zig
src/js_printer.zig
src/jsc_stub.zig
src/json_parser.zig
src/libarchive/libarchive-bindings.zig
src/libarchive/libarchive.zig
src/linear_fifo.zig
@@ -664,6 +646,7 @@ src/main_test.zig
src/main_wasm.zig
src/main.zig
src/meta.zig
src/multi_array_list.zig
src/napi/napi.zig
src/node_fallbacks.zig
src/open.zig
@@ -705,9 +688,6 @@ src/s3/multipart_options.zig
src/s3/multipart.zig
src/s3/simple_request.zig
src/s3/storage_class.zig
src/safety.zig
src/safety/alloc_ptr.zig
src/safety/CriticalSection.zig
src/semver.zig
src/semver/ExternalString.zig
src/semver/SemverObject.zig
@@ -841,20 +821,19 @@ src/sql/postgres/types/PostgresString.zig
src/sql/postgres/types/Tag.zig
src/StandaloneModuleGraph.zig
src/StaticHashMap.zig
src/string_immutable.zig
src/string_types.zig
src/string.zig
src/string/escapeHTML.zig
src/string/HashedString.zig
src/string/immutable.zig
src/string/immutable/escapeHTML.zig
src/string/immutable/exact_size_matcher.zig
src/string/immutable/grapheme.zig
src/string/immutable/paths.zig
src/string/immutable/unicode.zig
src/string/immutable/visible.zig
src/string/MutableString.zig
src/string/paths.zig
src/string/PathString.zig
src/string/SmolStr.zig
src/string/StringBuilder.zig
src/string/StringJoiner.zig
src/string/unicode.zig
src/string/visible.zig
src/string/WTFStringImpl.zig
src/sys_uv.zig
src/sys.zig
@@ -871,6 +850,8 @@ src/threading/ThreadPool.zig
src/threading/unbounded_queue.zig
src/threading/WaitGroup.zig
src/tmp.zig
src/toml/toml_lexer.zig
src/toml/toml_parser.zig
src/tracy.zig
src/trait.zig
src/transpiler.zig

@@ -255,10 +255,6 @@ set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)

absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)

# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)

set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
  ${CODEGEN_PATH}/ZigGeneratedClasses.h
  ${CODEGEN_PATH}/ZigGeneratedClasses.cpp
@@ -312,27 +308,6 @@ set(BUN_JAVASCRIPT_OUTPUTS
  ${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig
)

set(BUN_CPP_OUTPUTS
  ${CODEGEN_PATH}/cpp.zig
)

register_command(
  TARGET
    bun-cppbind
  COMMENT
    "Generating C++ --> Zig bindings"
  COMMAND
    ${BUN_EXECUTABLE}
    ${CWD}/src/codegen/cppbind.ts
    ${CWD}/src
    ${CODEGEN_PATH}
  SOURCES
    ${BUN_JAVASCRIPT_CODEGEN_SOURCES}
    ${BUN_CXX_SOURCES}
  OUTPUTS
    ${BUN_CPP_OUTPUTS}
)

register_command(
  TARGET
    bun-js-modules
@@ -562,7 +537,6 @@ set(BUN_ZIG_GENERATED_SOURCES
  ${BUN_ERROR_CODE_OUTPUTS}
  ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
  ${BUN_JAVASCRIPT_OUTPUTS}
  ${BUN_CPP_OUTPUTS}
)

# In debug builds, these are not embedded, but rather referenced at runtime.
@@ -632,7 +606,6 @@ register_command(
  TARGETS
    clone-zig
    clone-zstd
    bun-cppbind
  SOURCES
    ${BUN_ZIG_SOURCES}
    ${BUN_ZIG_GENERATED_SOURCES}
@@ -645,6 +618,10 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")

set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)

# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)

if(WIN32)
  list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
  list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
@@ -981,16 +958,6 @@ if(APPLE)
    -Wl,-map,${bun}.linker-map
  )

  if(DEBUG)
    target_link_options(${bun} PUBLIC
      # Suppress ALL linker warnings on macOS.
      # The intent is to only suppress linker alignment warnings.
      # As of July 21st, 2025 there doesn't seem to be a more specific suppression just for linker alignment warnings.
      # If you find one, please update this to only be for linker alignment.
      -Wl,-w
    )
  endif()

  # don't strip in debug, this seems to be needed so that the Zig std library
  # `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
  # output executable

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    libarchive/libarchive
  COMMIT
    7118f97c26bf0b2f426728b482f86508efc81d02
    898dc8319355b7e985f68a9819f182aaed61b53a
)

register_cmake_command(
@@ -20,14 +20,11 @@ register_cmake_command(
    -DENABLE_WERROR=OFF
    -DENABLE_BZip2=OFF
    -DENABLE_CAT=OFF
    -DENABLE_CPIO=OFF
    -DENABLE_UNZIP=OFF
    -DENABLE_EXPAT=OFF
    -DENABLE_ICONV=OFF
    -DENABLE_LIBB2=OFF
    -DENABLE_LibGCC=OFF
    -DENABLE_LIBXML2=OFF
    -DENABLE_WIN32_XMLLITE=OFF
    -DENABLE_LZ4=OFF
    -DENABLE_LZMA=OFF
    -DENABLE_LZO=OFF

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    cloudflare/lol-html
  COMMIT
    d64457d9ff0143deef025d5df7e8586092b9afb7
    67f1d4ffd6b74db7e053fb129dcce620193c180d
)

set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    oven-sh/mimalloc
  COMMIT
    c1f17cd2538417620f60bff70bffe7e68d332aec
    1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
)

set(MIMALLOC_CMAKE_ARGS
@@ -31,7 +31,13 @@ if(ENABLE_VALGRIND)
  list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()

if(DEBUG)
if(WIN32)
  if(DEBUG)
    set(MIMALLOC_LIBRARY mimalloc-static-debug)
  else()
    set(MIMALLOC_LIBRARY mimalloc-static)
  endif()
elseif(DEBUG)
  if (ENABLE_ASAN)
    set(MIMALLOC_LIBRARY mimalloc-asan-debug)
  else()
@@ -47,7 +53,6 @@ if(APPLE OR (LINUX AND NOT DEBUG))
  set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o)
endif()


register_cmake_command(
  TARGET
    mimalloc

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION 642e2252f6298387edb6d2f991a0408fd0320466)
  set(WEBKIT_VERSION 1098cc50652ab1eab171f58f7669e19ca6c276ae)
endif()

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

@@ -20,7 +20,7 @@ else()
  unsupported(CMAKE_SYSTEM_NAME)
endif()

set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})

if(CMAKE_BUILD_TYPE STREQUAL "Release")

@@ -158,7 +158,7 @@ See [Test > Lifecycle](https://bun.com/docs/test/lifecycle) for complete documen

## Mocks

Create mock functions with the `mock` function.
Create mock functions with the `mock` function. Mocks are automatically reset between tests.

```ts
import { test, expect, mock } from "bun:test";

@@ -20,7 +20,7 @@ this one:

Given a file implementing a simple function, such as `add`

```zig#src/bun.js/math.zig
pub fn add(global: *jsc.JSGlobalObject, a: i32, b: i32) !i32 {
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
    return std.math.add(i32, a, b) catch {
        // Binding functions can return `error.OutOfMemory` and `error.JSError`.
        // Others like `error.Overflow` from `std.math.add` must be converted.
@@ -33,7 +33,7 @@ const gen = bun.gen.math; // "math" being this file's basename

const std = @import("std");
const bun = @import("bun");
const jsc = bun.jsc;
const JSC = bun.JSC;
```

Then describe the API schema using a `.bind.ts` function. The binding file goes next to the Zig file.

@@ -2,15 +2,16 @@ const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const string = []const u8;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = [:0]const u8;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const Features = bun.analytics.Features;
const C = bun.C;
const Features = @import("../src/analytics/analytics_thread.zig").Features;

// zig run --main-pkg-path ../ ./features.zig
pub fn main() anyerror!void {

@@ -1,13 +1,14 @@
const std = @import("std");
const bun = @import("bun");
const string = []const u8;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = [:0]const u8;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;
const clap = @import("../src/deps/zig-clap/clap.zig");

const URL = @import("../src/url.zig").URL;

@@ -686,7 +686,7 @@ def add(debugger, *, category, regex=False, type, identifier=None, synth=False,
    if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary == True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
    if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))

def MultiArrayList_Entry(type): return 'multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type

def __lldb_init_module(debugger, _=None):
    # Initialize Zig Categories
@@ -701,8 +701,8 @@ def __lldb_init_module(debugger, _=None):
    # Initialize Zig Standard Library
    add(debugger, category='zig.std', type='mem.Allocator', summary='${var.ptr}')
    add(debugger, category='zig.std', regex=True, type='^segmented_list\\.SegmentedList\\(.*\\)$', identifier='std_SegmentedList', synth=True, expand=True, summary='len=${var.len}')
    add(debugger, category='zig.std', regex=True, type='multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type='multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type=MultiArrayList_Entry('.*'), identifier='std_Entry', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)$', identifier='std_HashMapUnmanaged', synth=True, expand=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)\\.Entry$', identifier = 'std_Entry', synth=True, inline_children=True, summary=True)

@@ -1,14 +1,15 @@
// most of this file is copy pasted from other files in misctools
const std = @import("std");
const bun = @import("bun");
const string = []const u8;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = [:0]const u8;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;
const clap = @import("../src/deps/zig-clap/clap.zig");

const URL = @import("../src/url.zig").URL;

@@ -2,14 +2,15 @@ const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const string = []const u8;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = [:0]const u8;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;

// zig build-exe -Doptimize=ReleaseFast --main-pkg-path ../ ./readlink-getfd.zig
pub fn main() anyerror!void {

@@ -2,14 +2,15 @@ const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const string = []const u8;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = [:0]const u8;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;

// zig build-exe -Doptimize=ReleaseFast --main-pkg-path ../ ./readlink-getfd.zig
pub fn main() anyerror!void {

@@ -2,14 +2,15 @@ const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const string = []const u8;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = [:0]const u8;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;

const Archive = @import("../src/libarchive/libarchive.zig").Archive;
const Zlib = @import("../src/zlib.zig");

@@ -7,9 +7,6 @@
    "./packages/@types/bun"
  ],
  "devDependencies": {
    "bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
    "@lezer/common": "^1.2.3",
    "@lezer/cpp": "^1.1.3",
    "esbuild": "^0.21.4",
    "mitata": "^0.1.11",
    "peechy": "0.4.34",
@@ -31,8 +28,8 @@
    "watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
    "bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
    "build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
    "build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
    "build:debug": "bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
    "build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
    "build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
    "build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
    "build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert",

4
packages/bun-types/test.d.ts
vendored
@@ -56,10 +56,6 @@ declare module "bun:test" {
   * Restore the previous value of mocks.
   */
  restore(): void;
  /**
   * Clear all mock calls.
   */
  clearAllMocks(): void;
};

/**

@@ -27,17 +27,11 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
- Run scripts from package.json
- Visual lockfile viewer for old binary lockfiles (`bun.lockb`)

## Bun test runner integration

Run and debug tests directly from VSCode's Testing panel. The extension automatically discovers test files, shows inline test status, and provides rich error messages with diffs.

## In-editor error messages

When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.

<div align="center">
<sup>In the example above VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>
Binary file not shown.
Before: Size: 6.7 MiB
@@ -102,6 +102,8 @@

    "@types/ws": ["@types/ws@8.5.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ=="],

    "@types/xml2js": ["@types/xml2js@0.4.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ=="],

    "@vscode/debugadapter": ["@vscode/debugadapter@1.61.0", "", { "dependencies": { "@vscode/debugprotocol": "1.61.0" } }, "sha512-VDGLUFDVAdnftUebZe4uQCIFUbJ7rTc2Grps4D/CXl+qyzTZSQLv5VADEOZ6kBYG4SvlnMLql5vPQ0G6XvUCvQ=="],

    "@vscode/debugadapter-testsupport": ["@vscode/debugadapter-testsupport@1.61.0", "", { "dependencies": { "@vscode/debugprotocol": "1.61.0" } }, "sha512-M/8aNX1aFvupd+SP0NLEVLKUK9y52BuCK5vKO2gzdpSoRUR2fR8oFbGkTie+/p2Yrcswnuf7hFx0xWkV9avRdg=="],

Before: Size: 462 KiB, After: Size: 462 KiB
@@ -10,13 +10,15 @@
  "devDependencies": {
    "@types/bun": "^1.1.10",
    "@types/vscode": "^1.60.0",
    "@types/xml2js": "^0.4.14",
    "@vscode/debugadapter": "^1.56.0",
    "@vscode/debugadapter-testsupport": "^1.56.0",
    "@vscode/test-cli": "^0.0.10",
    "@vscode/test-electron": "^2.4.1",
    "@vscode/vsce": "^2.20.1",
    "esbuild": "^0.19.2",
    "typescript": "^5.0.0"
    "typescript": "^5.0.0",
    "xml2js": "^0.6.2"
  },
  "activationEvents": [
    "onStartupFinished"
@@ -71,7 +73,7 @@
    },
    "bun.test.filePattern": {
      "type": "string",
      "default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts,cjs,mjs}",
      "default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
      "description": "Glob pattern to find test files"
    },
    "bun.test.customFlag": {
@@ -81,14 +83,8 @@
    },
    "bun.test.customScript": {
      "type": "string",
      "default": "bun test",
      "default": "",
      "description": "Custom script to use instead of `bun test`, for example script from `package.json`"
    },
    "bun.test.enable": {
      "type": "boolean",
      "description": "If the test explorer should be enabled and integrated with your editor",
      "scope": "window",
      "default": true
    }
  }
},

@@ -1,864 +0,0 @@
import { describe, expect, test } from "bun:test";
import { MockTestController, MockWorkspaceFolder } from "./vscode-types.mock";
import "./vscode.mock";
import { makeTestController, makeWorkspaceFolder } from "./vscode.mock";

const { BunTestController } = await import("../bun-test-controller");

const mockTestController: MockTestController = makeTestController();
const mockWorkspaceFolder: MockWorkspaceFolder = makeWorkspaceFolder("/test/workspace");

const controller = new BunTestController(mockTestController, mockWorkspaceFolder, true);
const internal = controller._internal;

const { expandEachTests, parseTestBlocks, getBraceDepth } = internal;

describe("BunTestController (static file parser)", () => {
  describe("expandEachTests", () => {
    describe("$variable syntax", () => {
      test("should not expand $variable patterns (Bun behavior)", () => {
        const content = `test.each([
          { a: 1, b: 2, expected: 3 },
          { a: 5, b: 5, expected: 10 }
        ])('$a + $b = $expected', ({ a, b, expected }) => {})`;

        const result = expandEachTests("test.each([", "$a + $b = $expected", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$a + $b = $expected");
      });

      test("should not expand string values with quotes", () => {
        const content = `test.each([
          { name: "Alice", city: "NYC" },
          { name: "Bob", city: "LA" }
        ])('$name from $city', ({ name, city }) => {})`;

        const result = expandEachTests("test.each([", "$name from $city", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$name from $city");
      });

      test("should not expand nested property access", () => {
        const content = `test.each([
          { user: { name: "Alice", profile: { city: "NYC" } } },
          { user: { name: "Bob", profile: { city: "LA" } } }
        ])('$user.name from $user.profile.city', ({ user }) => {})`;

        const result = expandEachTests("test.each([", "$user.name from $user.profile.city", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$user.name from $user.profile.city");
      });

      test("should not expand array indexing", () => {
        const content = `test.each([
          { users: [{ name: "Alice" }, { name: "Bob" }] },
          { users: [{ name: "Carol" }, { name: "Dave" }] }
        ])('first user: $users.0.name', ({ users }) => {})`;

        const result = expandEachTests("test.each([", "first user: $users.0.name", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("first user: $users.0.name");
      });

      test("should return template as-is for missing properties", () => {
        const content = `test.each([
          { a: 1 },
          { a: 2 }
        ])('$a and $missing', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "$a and $missing", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$a and $missing");
      });

      test("should handle edge cases with special identifiers", () => {
        const content = `test.each([
          { _valid: "ok", $dollar: "yes", _123mix: "mixed" }
        ])('$_valid | $$dollar | $_123mix', (obj) => {})`;

        const result = expandEachTests("test.each([", "$_valid | $$dollar | $_123mix", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$_valid | $$dollar | $_123mix");
      });

      test("should handle invalid identifiers as literals", () => {
        const content = `test.each([
          { valid: "test" }
        ])('$valid | $123invalid | $has-dash', (obj) => {})`;

        const result = expandEachTests("test.each([", "$valid | $123invalid | $has-dash", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$valid | $123invalid | $has-dash");
      });
    });

    describe("% formatters", () => {
      test("should handle %i for integers", () => {
        const content = `test.each([
          [1, 2, 3],
          [5, 5, 10]
        ])('%i + %i = %i', (a, b, expected) => {})`;

        const result = expandEachTests("test.each([", "%i + %i = %i", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("1 + 2 = 3");
        expect(result[1].name).toBe("5 + 5 = 10");
      });

      test("should handle %s for strings", () => {
        const content = `test.each([
          ["hello", "world"],
          ["foo", "bar"]
        ])('%s %s', (a, b) => {})`;

        const result = expandEachTests("test.each([", "%s %s", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("hello world");
        expect(result[1].name).toBe("foo bar");
      });

      test("should handle %f and %d for numbers", () => {
        const content = `test.each([
          [1.5, 2.7],
          [3.14, 2.71]
        ])('%f and %d', (a, b) => {})`;

        const result = expandEachTests("test.each([", "%f and %d", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("1.5 and 2.7");
        expect(result[1].name).toBe("3.14 and 2.71");
      });

      test("should handle %o and %j for objects", () => {
        const content = `test.each([
          [{ a: 1 }, { b: 2 }]
        ])('%o and %j', (obj1, obj2) => {})`;

        const result = expandEachTests("test.each([", "%o and %j", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("%o and %j");
      });

      test("should handle %# for index", () => {
        const content = `test.each([
          [1, 2],
          [3, 4],
          [5, 6]
        ])('Test #%#: %i + %i', (a, b) => {})`;

        const result = expandEachTests("test.each([", "Test #%#: %i + %i", content, 0, "test", 1);

        expect(result).toHaveLength(3);
        expect(result[0].name).toBe("Test #1: 1 + 2");
        expect(result[1].name).toBe("Test #2: 3 + 4");
        expect(result[2].name).toBe("Test #3: 5 + 6");
      });

      test("should handle %% for literal percent", () => {
        const content = `test.each([
          [50],
          [100]
        ])('%i%% complete', (percent) => {})`;

        const result = expandEachTests("test.each([", "%i%% complete", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("50% complete");
        expect(result[1].name).toBe("100% complete");
      });
    });

    describe("describe.each", () => {
      test("should work with describe.each", () => {
        const content = `describe.each([
          { module: "fs", method: "readFile" },
          { module: "path", method: "join" }
        ])('$module module', ({ module, method }) => {})`;

        const result = expandEachTests("describe.each([", "$module module", content, 0, "describe", 1);
        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$module module");
        expect(result[0].type).toBe("describe");
      });
    });

    describe("error handling", () => {
      test("should handle non-.each tests", () => {
        const result = expandEachTests("test", "regular test", "test('regular test', () => {})", 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("regular test");
      });

      test("should handle malformed JSON", () => {
        const content = `test.each([
          { invalid json }
        ])('test', () => {})`;

        const result = expandEachTests("test.each([", "test", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test");
      });

      test("should handle non-array values", () => {
        const content = `test.each({ not: "array" })('test', () => {})`;

        const result = expandEachTests("test.each([", "test", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test");
      });
    });

    describe("mixed formatters", () => {
      test("should handle both $ and % in objects", () => {
        const content = `test.each([
          { name: "Test", index: 0 }
        ])('$name #%#', (obj) => {})`;

        const result = expandEachTests("test.each([", "$name #%#", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$name #%#");
      });
    });

    describe("edge cases", () => {
      test("should handle complex nested objects", () => {
        const content = `test.each([
          {
            user: {
              profile: {
                address: {
                  city: "NYC",
                  coords: { lat: 40.7128, lng: -74.0060 }
                }
              }
            }
          }
        ])('User from $user.profile.address.city at $user.profile.address.coords.lat', ({ user }) => {})`;

        const result = expandEachTests(
          "test.each([",
          "User from $user.profile.address.city at $user.profile.address.coords.lat",
          content,
          0,
          "test",
          1,
        );

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("User from $user.profile.address.city at $user.profile.address.coords.lat");
      });

      test("should handle arrays with inline comments", () => {
        const content = `test.each([
          { a: 1 }, // first test
          { a: 2 }, // second test
          // { a: 3 }, // commented out test
          { a: 4 } /* final test */
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test $a");
      });

      test("should handle arrays with multiline comments", () => {
        const content = `test.each([
          { name: "test1" },
          /* This is a
          multiline comment
          that spans several lines */
          { name: "test2" },
          /**
           * JSDoc style comment
           * with multiple lines
           */
          { name: "test3" }
        ])('$name', ({ name }) => {})`;

        const result = expandEachTests("test.each([", "$name", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$name");
      });

      test("should handle malformed array syntax gracefully", () => {
        const content = `test.each([
          { a: 1 },
          { a: 2,,, }, // extra commas
          { a: 3, }, // trailing comma
          { a: 4 },,, // extra trailing commas
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result.length).toBeGreaterThanOrEqual(1);
      });

      test("should handle strings with comment-like content", () => {
        const content = `test.each([
          { comment: "// this is not a comment" },
          { comment: "/* neither is this */" },
          { url: "https://example.com/path" }
        ])('Test: $comment $url', (data) => {})`;

        const result = expandEachTests("test.each([", "Test: $comment $url", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Test: $comment $url");
      });

      test("should handle special characters in strings", () => {
        const content = `test.each([
          { char: "\\n" },
          { char: "\\t" },
          { char: "\\"" },
          { char: "\\'" },
          { char: "\\\\" },
          { char: "\`" }
        ])('Special char: $char', ({ char }) => {})`;

        const result = expandEachTests("test.each([", "Special char: $char", content, 0, "test", 1);

        expect(result.length).toBeGreaterThanOrEqual(1);
      });

      test("should handle empty arrays", () => {
        const content = `test.each([])('should handle empty', () => {})`;

        const result = expandEachTests("test.each([", "should handle empty", content, 0, "test", 1);

        expect(result).toHaveLength(0);
      });

      test("should handle undefined and null values", () => {
        const content = `test.each([
          { value: undefined },
          { value: null },
          { value: false },
          { value: 0 },
          { value: "" }
        ])('Value: $value', ({ value }) => {})`;

        const result = expandEachTests("test.each([", "Value: $value", content, 0, "test", 1);

        if (result.length === 1) {
          expect(result[0].name).toBe("Value: $value");
        } else {
          expect(result).toHaveLength(5);
          expect(result[0].name).toBe("Value: undefined");
          expect(result[1].name).toBe("Value: null");
          expect(result[2].name).toBe("Value: false");
          expect(result[3].name).toBe("Value: 0");
          expect(result[4].name).toBe("Value: ");
        }
      });

      test("should handle circular references gracefully", () => {
        const content = `test.each([
          { a: { b: "[Circular]" } },
          { a: { b: { c: "[Circular]" } } }
        ])('Circular: $a.b', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "Circular: $a.b", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Circular: $a.b");
      });

      test("should handle very long property paths", () => {
        const content = `test.each([
          {
            a: {
              b: {
                c: {
                  d: {
                    e: {
                      f: {
                        g: "deeply nested"
                      }
                    }
                  }
                }
              }
            }
          }
        ])('Value: $a.b.c.d.e.f.g', (data) => {})`;

        const result = expandEachTests("test.each([", "Value: $a.b.c.d.e.f.g", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Value: $a.b.c.d.e.f.g");
      });

      test("should handle syntax errors in array", () => {
        const content = `test.each([
          { a: 1 }
          { a: 2 } // missing comma
          { a: 3 }
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test $a");
      });

      test("should handle arrays with trailing commas", () => {
        const content = `test.each([
          { a: 1 },
          { a: 2 },
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test $a");
      });

      test("should handle mixed data types in arrays", () => {
        const content = `test.each([
          ["string", 123, true, null, undefined],
          [{ obj: true }, [1, 2, 3], new Date("2024-01-01")]
        ])('test %s %i %s %s %s', (...args) => {})`;

        const result = expandEachTests("test.each([", "test %s %i %s %s %s", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test %s %i %s %s %s");
      });

      test("should handle regex-like strings", () => {
        const content = `test.each([
          { pattern: "/^test.*$/" },
          { pattern: "\\\\d{3}-\\\\d{4}" },
          { pattern: "[a-zA-Z]+" }
        ])('Pattern: $pattern', ({ pattern }) => {})`;

        const result = expandEachTests("test.each([", "Pattern: $pattern", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Pattern: $pattern");
      });

      test("should handle invalid property access gracefully", () => {
        const content = `test.each([
          { a: { b: null } },
          { a: null },
          { },
          { a: { } }
        ])('Access: $a.b.c.d', (data) => {})`;

        const result = expandEachTests("test.each([", "Access: $a.b.c.d", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Access: $a.b.c.d");
      });

      test("should handle object methods and computed properties", () => {
        const content = `test.each([
          { fn: function() {}, method() {}, arrow: () => {} },
          { ["computed"]: "value", [Symbol.for("sym")]: "symbol" }
        ])('Object with methods', (obj) => {})`;

        const result = expandEachTests("test.each([", "Object with methods", content, 0, "test", 1);

        expect(result.length).toBeGreaterThanOrEqual(1);
      });
    });
  });

  describe("parseTestBlocks", () => {
    test("should parse simple test blocks", () => {
      const content = `
        test("should add numbers", () => {
          expect(1 + 1).toBe(2);
        });

        test("should multiply numbers", () => {
          expect(2 * 3).toBe(6);
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(2);
      expect(result[0].name).toBe("should add numbers");
      expect(result[0].type).toBe("test");
      expect(result[1].name).toBe("should multiply numbers");
      expect(result[1].type).toBe("test");
    });

    test("should parse describe blocks with nested tests", () => {
      const content = `
        describe("Math operations", () => {
          test("addition", () => {
            expect(1 + 1).toBe(2);
          });

          test("subtraction", () => {
            expect(5 - 3).toBe(2);
          });
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(1);
      expect(result[0].name).toBe("Math operations");
      expect(result[0].type).toBe("describe");
      expect(result[0].children).toHaveLength(2);
      expect(result[0].children[0].name).toBe("addition");
      expect(result[0].children[1].name).toBe("subtraction");
    });

    test("should handle test modifiers", () => {
      const content = `
        test.skip("skipped test", () => {});
        test.todo("todo test", () => {});
        test.only("only test", () => {});
        test.failing("failing test", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(4);
      expect(result[0].name).toBe("skipped test");
      expect(result[1].name).toBe("todo test");
      expect(result[2].name).toBe("only test");
      expect(result[3].name).toBe("failing test");
    });

    test("should handle conditional tests", () => {
      const content = `
        test.if(true)("conditional test", () => {});
        test.skipIf(false)("skip if test", () => {});
        test.todoIf(true)("todo if test", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("conditional test");
      expect(result[1].name).toBe("skip if test");
      expect(result[2].name).toBe("todo if test");
    });

    test("should ignore comments", () => {
      const content = `
        // This is a comment with test("fake test", () => {})
        /* Multi-line comment
        test("another fake test", () => {})
        */
        test("real test", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(1);
      expect(result[0].name).toBe("real test");
    });

    test("should handle nested describe blocks", () => {
      const content = `
        describe("Outer", () => {
          describe("Inner", () => {
            test("deeply nested", () => {});
          });
          test("shallow test", () => {});
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(1);
      expect(result[0].name).toBe("Outer");
      expect(result[0].children).toHaveLength(2);
      expect(result[0].children[0].name).toBe("Inner");
      expect(result[0].children[0].children).toHaveLength(1);
      expect(result[0].children[0].children[0].name).toBe("deeply nested");
      expect(result[0].children[1].name).toBe("shallow test");
    });

    test("should handle it() as alias for test()", () => {
      const content = `
        it("should work with it", () => {});
        it.skip("should skip with it", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(2);
      expect(result[0].name).toBe("should work with it");
      expect(result[0].type).toBe("test");
      expect(result[1].name).toBe("should skip with it");
    });

    test("should handle different quote types", () => {
      const content = `
        test('single quotes', () => {});
        test("double quotes", () => {});
        test(\`template literals\`, () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("single quotes");
      expect(result[1].name).toBe("double quotes");
      expect(result[2].name).toBe("template literals");
    });

    test("should handle escaped quotes in test names", () => {
      const content = `
        test("test with \\"escaped\\" quotes", () => {});
        test('test with \\'escaped\\' quotes', () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(2);
      expect(result[0].name).toBe('test with "escaped" quotes');
      expect(result[1].name).toBe("test with 'escaped' quotes");
    });

    test("should handle comments within test names", () => {
      const content = `
        test("test with // comment syntax", () => {});
        test("test with /* comment */ syntax", () => {});
        test("test with URL https://example.com", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThanOrEqual(1);

      const hasCommentSyntax = result.some(r => r.name.includes("comment syntax"));
      const hasURL = result.some(r => r.name.includes("https://example.com"));

      expect(hasCommentSyntax || hasURL).toBe(true);
    });

    test("should ignore code that looks like tests in strings", () => {
      const content = `
        const str = "test('fake test', () => {})";
        const template = \`describe("fake describe", () => {})\`;

        // Real test
        test("real test", () => {
          const example = 'test("nested fake", () => {})';
        });
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThanOrEqual(1);
      expect(result.some(r => r.name === "real test")).toBe(true);
    });

    test("should handle tests with complex modifier chains", () => {
      const content = `
        test.skip.failing("skipped failing test", () => {});
        test.only.todo("only todo test", () => {});
        describe.skip.each([1, 2])("skip each %i", (n) => {});
        it.failing.each([{a: 1}])("failing each $a", ({a}) => {});
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThan(0);
    });

    test("should handle weird spacing and formatting", () => {
      const content = `
        test ( "extra spaces" , ( ) => { } ) ;
        test
        (
        "multiline test"
        ,
        (
        )
        =>
        {
        }
        )
        ;
        test\t(\t"tabs"\t,\t()\t=>\t{}\t);
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("extra spaces");
      expect(result[1].name).toBe("multiline test");
      expect(result[2].name).toBe("tabs");
    });

    test("should handle test.each with complex patterns", () => {
      const content = `
        test.each([
          [1, 2, 3],
          [4, 5, 9]
        ])("when %i + %i, result should be %i", (a, b, expected) => {});

        describe.each([
          { db: "postgres" },
          { db: "mysql" }
        ])("Database $db", ({ db }) => {
          test("should connect", () => {});
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("when 1 + 2, result should be 3");
      expect(result[0].type).toBe("test");
      expect(result[1].name).toBe("when 4 + 5, result should be 9");
      expect(result[1].type).toBe("test");
      expect(result[2].name).toBe("Database $db");
      expect(result[2].type).toBe("describe");
    });

    test("should handle Unicode and emoji in test names", () => {
      const content = `
        test("测试中文", () => {});
        test("テスト日本語", () => {});
        test("тест русский", () => {});
        test("🚀 rocket test", () => {});
        test("Test with 🎉 celebration", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(5);
      expect(result[0].name).toBe("测试中文");
      expect(result[1].name).toBe("テスト日本語");
      expect(result[2].name).toBe("тест русский");
      expect(result[3].name).toBe("🚀 rocket test");
      expect(result[4].name).toBe("Test with 🎉 celebration");
    });

    test("should handle test names with interpolation-like syntax", () => {
      const content = `
        test("test with \${variable}", () => {});
        test("test with \$dollar", () => {});
        test("test with %percent", () => {});
        test(\`template literal test\`, () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(4);
      expect(result[0].name).toBe("test with ${variable}");
      expect(result[1].name).toBe("test with $dollar");
      expect(result[2].name).toBe("test with %percent");
      expect(result[3].name).toBe("template literal test");
    });

    test("should handle async/await in test definitions", () => {
      const content = `
        test("sync test", () => {});
        test("async test", async () => {});
        test("test with await", async () => {
          await something();
        });
        it("async it", async function() {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(4);
      expect(result[0].name).toBe("sync test");
      expect(result[1].name).toBe("async test");
      expect(result[2].name).toBe("test with await");
      expect(result[3].name).toBe("async it");
    });

    test("should handle generator functions and other ES6+ syntax", () => {
      const content = `
        test("generator test", function* () {
          yield 1;
        });

        test.each\`
          a | b | expected
          \${1} | \${1} | \${2}
          \${1} | \${2} | \${3}
        \`('$a + $b = $expected', ({ a, b, expected }) => {});
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThanOrEqual(1);
      expect(result[0].name).toBe("generator test");
    });
  });

  describe("getBraceDepth", () => {
    test("should count braces correctly", () => {
      const content = "{ { } }";
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
      expect(getBraceDepth(content, 0, 3)).toBe(2);
      expect(getBraceDepth(content, 0, 5)).toBe(1);
    });

    test("should ignore braces in strings", () => {
      const content = '{ "string with { braces }" }';
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should ignore braces in template literals", () => {
      const content = "{ `template with { braces }` }";
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle escaped quotes", () => {
      const content = '{ "escaped \\" quote" }';
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle mixed quotes", () => {
      const content = `{ "double" + 'single' + \`template\` }`;
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle nested braces", () => {
      const content = "{ a: { b: { c: 1 } } }";
      expect(getBraceDepth(content, 0, 10)).toBe(2);
      expect(getBraceDepth(content, 0, 15)).toBe(3);
    });

    test("should handle complex template literals", () => {
      const content = '{ `${foo({ bar: "baz" })} and ${nested.value}` }';
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle edge cases", () => {
      expect(getBraceDepth("", 0, 0)).toBe(0);

      expect(getBraceDepth("{{{}}}", 0, 6)).toBe(0);

      expect(getBraceDepth("{{{", 0, 3)).toBe(3);
      expect(getBraceDepth("}}}", 0, 3)).toBe(-3);

      const templateContent = "{ `${foo}` + `${bar}` }";
      expect(getBraceDepth(templateContent, 0, templateContent.length)).toBe(0);
    });
  });
});
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,570 +0,0 @@
/**
 * Mock VSCode types and classes for testing
 * These should be as close as possible to the real VSCode API
 */

export interface MockUri {
  readonly scheme: string;
  readonly authority: string;
  readonly path: string;
  readonly query: string;
  readonly fragment: string;
  readonly fsPath: string;
  toString(): string;
}

export class MockUri implements MockUri {
  constructor(
    public readonly scheme: string,
    public readonly authority: string,
    public readonly path: string,
    public readonly query: string,
    public readonly fragment: string,
    public readonly fsPath: string,
  ) {}

  static file(path: string): MockUri {
    return new MockUri("file", "", path, "", "", path);
  }

  toString(): string {
    return `${this.scheme}://${this.authority}${this.path}`;
  }
}

export class MockPosition {
  constructor(
    public readonly line: number,
    public readonly character: number,
  ) {}
}

export class MockRange {
  constructor(
    public readonly start: MockPosition,
    public readonly end: MockPosition,
  ) {}
}

export class MockLocation {
  constructor(
    public readonly uri: MockUri,
    public readonly range: MockRange,
  ) {}
}

export class MockTestTag {
  constructor(public readonly id: string) {}
}

export class MockTestMessage {
  public location?: MockLocation;
  public actualOutput?: string;
  public expectedOutput?: string;

  constructor(public message: string | MockMarkdownString) {}

  static diff(message: string, expected: string, actual: string): MockTestMessage {
    const msg = new MockTestMessage(message);
    msg.expectedOutput = expected;
    msg.actualOutput = actual;
    return msg;
  }
}

export class MockMarkdownString {
  constructor(public value: string = "") {}

  appendCodeblock(code: string, language?: string): MockMarkdownString {
    this.value += `\n\`\`\`${language || ""}\n${code}\n\`\`\``;
    return this;
  }

  appendMarkdown(value: string): MockMarkdownString {
    this.value += value;
    return this;
  }

  appendText(value: string): MockMarkdownString {
    this.value += value.replace(/[\\`*_{}[\]()#+\-.!]/g, "\\$&");
    return this;
  }
}

export interface MockTestItem {
  readonly id: string;
  readonly uri?: MockUri;
  readonly children: MockTestItemCollection;
  readonly parent?: MockTestItem;
  label: string;
  description?: string;
  tags: readonly MockTestTag[];
  canResolveChildren: boolean;
  busy: boolean;
  range?: MockRange;
  error?: string | MockMarkdownString;
}

export interface MockTestItemCollection {
  readonly size: number;
  add(item: MockTestItem): void;
  replace(items: readonly MockTestItem[]): void;
  forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void;
  get(itemId: string): MockTestItem | undefined;
  delete(itemId: string): void;
  [Symbol.iterator](): Iterator<[string, MockTestItem]>;
}

export class MockTestItemCollection implements MockTestItemCollection {
  private items = new Map<string, MockTestItem>();

  get size(): number {
    return this.items.size;
  }

  add(item: MockTestItem): void {
    this.items.set(item.id, item);
  }

  replace(items: readonly MockTestItem[]): void {
    this.items.clear();
    for (const item of items) {
      this.items.set(item.id, item);
    }
  }

  forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void {
    this.items.forEach((item, id) => callback(item, id, this));
  }

  get(itemId: string): MockTestItem | undefined {
    return this.items.get(itemId);
  }

  delete(itemId: string): void {
    this.items.delete(itemId);
  }

  [Symbol.iterator](): Iterator<[string, MockTestItem]> {
    return this.items[Symbol.iterator]();
  }

  clear(): void {
    this.items.clear();
  }

  set(id: string, item: MockTestItem): void {
    this.items.set(id, item);
  }

  values(): IterableIterator<MockTestItem> {
    return this.items.values();
  }

  keys(): IterableIterator<string> {
    return this.items.keys();
  }

  entries(): IterableIterator<[string, MockTestItem]> {
    return this.items.entries();
  }
}

export class MockTestItem implements MockTestItem {
  public canResolveChildren: boolean = false;
  public busy: boolean = false;
  public description?: string;
  public range?: MockRange;
  public error?: string | MockMarkdownString;
  public readonly children: MockTestItemCollection;

  constructor(
    public readonly id: string,
    public label: string,
    public readonly uri?: MockUri,
    public readonly parent?: MockTestItem,
    public tags: readonly MockTestTag[] = [],
  ) {
    this.children = new MockTestItemCollection();
  }
}

export interface MockTestController {
  readonly items: MockTestItemCollection;
  createTestItem(id: string, label: string, uri?: MockUri): MockTestItem;
  createRunProfile(
    label: string,
    kind: MockTestRunProfileKind,
    runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    isDefault?: boolean,
  ): MockTestRunProfile;
  createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun;
  invalidateTestResults(items?: readonly MockTestItem[]): void;
  resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
  refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;
}

export class MockTestController implements MockTestController {
  public readonly items: MockTestItemCollection;
  public resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
  public refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;

  constructor(
    public readonly id: string,
    public readonly label: string,
  ) {
    this.items = new MockTestItemCollection();
  }

  createTestItem(id: string, label: string, uri?: MockUri): MockTestItem {
    return new MockTestItem(id, label, uri);
  }

  createRunProfile(
    label: string,
    kind: MockTestRunProfileKind,
    runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    isDefault?: boolean,
  ): MockTestRunProfile {
    return new MockTestRunProfile(label, kind, runHandler, isDefault);
  }

  createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun {
    return new MockTestRun(name, persist);
  }

  invalidateTestResults(items?: readonly MockTestItem[]): void {
    // Mock implementation - in real VSCode this would invalidate test results
  }

  dispose(): void {
    this.items.clear();
  }
}

export enum MockTestRunProfileKind {
  Run = 1,
  Debug = 2,
  Coverage = 3,
}

export interface MockTestRunProfile {
  readonly label: string;
  readonly kind: MockTestRunProfileKind;
  readonly isDefault: boolean;
  readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>;
  dispose(): void;
}

export class MockTestRunProfile implements MockTestRunProfile {
  constructor(
    public readonly label: string,
    public readonly kind: MockTestRunProfileKind,
    public readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    public readonly isDefault: boolean = false,
  ) {}

  dispose(): void {
    // No-op for mock
  }
}

export interface MockTestRunRequest {
  readonly include?: readonly MockTestItem[];
  readonly exclude?: readonly MockTestItem[];
  readonly profile?: MockTestRunProfile;
}

export class MockTestRunRequest implements MockTestRunRequest {
  constructor(
    public readonly include?: readonly MockTestItem[],
    public readonly exclude?: readonly MockTestItem[],
    public readonly profile?: MockTestRunProfile,
  ) {}
}

export interface MockTestRun {
  readonly name?: string;
  readonly token: MockCancellationToken;
  appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void;
  end(): void;
  enqueued(test: MockTestItem): void;
  errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
  failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
  passed(test: MockTestItem, duration?: number): void;
  skipped(test: MockTestItem): void;
  started(test: MockTestItem): void;
}

export class MockTestRun implements MockTestRun {
  public readonly token: MockCancellationToken;
  private _ended: boolean = false;

  constructor(
    public readonly name?: string,
    public readonly persist: boolean = true,
  ) {
    this.token = new MockCancellationToken();
  }

  appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void {
    if (this._ended) return;
    // For mock, just store output - in real VS Code this would appear in test output
  }

  end(): void {
    this._ended = true;
  }

  enqueued(test: MockTestItem): void {
    if (this._ended) return;
    // Mock implementation
  }

  errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
    if (this._ended) return;
    // Mock implementation
  }

  failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
    if (this._ended) return;
    // Mock implementation
  }

  passed(test: MockTestItem, duration?: number): void {
    if (this._ended) return;
    // Mock implementation
  }

  skipped(test: MockTestItem): void {
    if (this._ended) return;
    // Mock implementation
  }

  started(test: MockTestItem): void {
    if (this._ended) return;
    // Mock implementation
  }
}

export interface MockCancellationToken {
  readonly isCancellationRequested: boolean;
  onCancellationRequested(listener: () => void): MockDisposable;
}

export class MockCancellationToken implements MockCancellationToken {
  private _isCancellationRequested: boolean = false;
  private _listeners: (() => void)[] = [];

  get isCancellationRequested(): boolean {
    return this._isCancellationRequested;
  }

  onCancellationRequested(listener: () => void): MockDisposable {
    this._listeners.push(listener);
    return new MockDisposable(() => {
      const index = this._listeners.indexOf(listener);
      if (index >= 0) {
        this._listeners.splice(index, 1);
      }
    });
  }

  cancel(): void {
    this._isCancellationRequested = true;
    this._listeners.forEach(listener => listener());
  }
}

export interface MockDisposable {
  dispose(): void;
}

export class MockDisposable implements MockDisposable {
  constructor(private readonly disposeFunc?: () => void) {}

  dispose(): void {
    this.disposeFunc?.();
  }
}

export interface MockTextDocument {
  readonly uri: MockUri;
  readonly fileName: string;
  readonly isUntitled: boolean;
  readonly languageId: string;
  readonly version: number;
  readonly isDirty: boolean;
  readonly isClosed: boolean;
  readonly eol: MockEndOfLine;
  readonly lineCount: number;
  getText(range?: MockRange): string;
  getWordRangeAtPosition(position: MockPosition, regex?: RegExp): MockRange | undefined;
  lineAt(line: number | MockPosition): MockTextLine;
  offsetAt(position: MockPosition): number;
  positionAt(offset: number): MockPosition;
  save(): Promise<boolean>;
  validatePosition(position: MockPosition): MockPosition;
  validateRange(range: MockRange): MockRange;
}

export enum MockEndOfLine {
  LF = 1,
  CRLF = 2,
}

export interface MockTextLine {
  readonly lineNumber: number;
  readonly text: string;
  readonly range: MockRange;
  readonly rangeIncludingLineBreak: MockRange;
  readonly firstNonWhitespaceCharacterIndex: number;
  readonly isEmptyOrWhitespace: boolean;
}

export interface MockWorkspaceFolder {
  readonly uri: MockUri;
  readonly name: string;
  readonly index: number;
}

export class MockWorkspaceFolder implements MockWorkspaceFolder {
  constructor(
    public readonly uri: MockUri,
    public readonly name: string,
    public readonly index: number = 0,
  ) {}
}

export interface MockFileSystemWatcher extends MockDisposable {
  readonly ignoreCreateEvents: boolean;
  readonly ignoreChangeEvents: boolean;
  readonly ignoreDeleteEvents: boolean;
  onDidCreate(listener: (uri: MockUri) => void): MockDisposable;
  onDidChange(listener: (uri: MockUri) => void): MockDisposable;
  onDidDelete(listener: (uri: MockUri) => void): MockDisposable;
}

export class MockFileSystemWatcher implements MockFileSystemWatcher {
  public readonly ignoreCreateEvents: boolean = false;
  public readonly ignoreChangeEvents: boolean = false;
  public readonly ignoreDeleteEvents: boolean = false;

  private _createListeners: ((uri: MockUri) => void)[] = [];
  private _changeListeners: ((uri: MockUri) => void)[] = [];
  private _deleteListeners: ((uri: MockUri) => void)[] = [];

  onDidCreate(listener: (uri: MockUri) => void): MockDisposable {
    this._createListeners.push(listener);
    return new MockDisposable(() => {
      const index = this._createListeners.indexOf(listener);
      if (index >= 0) this._createListeners.splice(index, 1);
    });
  }

  onDidChange(listener: (uri: MockUri) => void): MockDisposable {
    this._changeListeners.push(listener);
    return new MockDisposable(() => {
      const index = this._changeListeners.indexOf(listener);
      if (index >= 0) this._changeListeners.splice(index, 1);
    });
  }

  onDidDelete(listener: (uri: MockUri) => void): MockDisposable {
    this._deleteListeners.push(listener);
    return new MockDisposable(() => {
      const index = this._deleteListeners.indexOf(listener);
      if (index >= 0) this._deleteListeners.splice(index, 1);
    });
  }

  dispose(): void {
    this._createListeners.length = 0;
    this._changeListeners.length = 0;
    this._deleteListeners.length = 0;
  }

  // Helper methods for testing
  triggerCreate(uri: MockUri): void {
    this._createListeners.forEach(listener => listener(uri));
  }

  triggerChange(uri: MockUri): void {
    this._changeListeners.forEach(listener => listener(uri));
  }

  triggerDelete(uri: MockUri): void {
    this._deleteListeners.forEach(listener => listener(uri));
  }
}

export interface MockRelativePattern {
  readonly base: string;
  readonly pattern: string;
}

export class MockRelativePattern implements MockRelativePattern {
  constructor(
    public readonly base: string | MockWorkspaceFolder,
    public readonly pattern: string,
  ) {}

  get baseUri(): MockUri {
    if (typeof this.base === "string") {
      return MockUri.file(this.base);
    }
    return this.base.uri;
  }
}

export interface MockConfiguration {
  get<T>(section: string, defaultValue?: T): T | undefined;
  has(section: string): boolean;
  inspect<T>(section: string): MockConfigurationInspect<T> | undefined;
  update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void>;
}

export interface MockConfigurationInspect<T> {
  readonly key: string;
  readonly defaultValue?: T;
  readonly globalValue?: T;
  readonly workspaceValue?: T;
  readonly workspaceFolderValue?: T;
}

export enum MockConfigurationTarget {
  Global = 1,
  Workspace = 2,
  WorkspaceFolder = 3,
}

export class MockConfiguration implements MockConfiguration {
  private _values = new Map<string, any>();

  get<T>(section: string, defaultValue?: T): T | undefined {
    return this._values.get(section) ?? defaultValue;
  }

  has(section: string): boolean {
    return this._values.has(section);
  }

  inspect<T>(section: string): MockConfigurationInspect<T> | undefined {
    return {
      key: section,
      defaultValue: undefined,
      globalValue: this._values.get(section),
      workspaceValue: undefined,
      workspaceFolderValue: undefined,
    };
  }

  async update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void> {
    this._values.set(section, value);
  }

  // Helper for testing
  setValue(section: string, value: any): void {
    this._values.set(section, value);
  }
}
@@ -1,56 +0,0 @@
import { mock } from "bun:test";
import {
  MockConfiguration,
  MockDisposable,
  MockFileSystemWatcher,
  MockLocation,
  MockMarkdownString,
  MockPosition,
  MockRange,
  MockRelativePattern,
  MockTestController,
  MockTestMessage,
  MockTestRunProfileKind,
  MockTestTag,
  MockUri,
  MockWorkspaceFolder,
} from "./vscode-types.mock";

mock.module("vscode", () => ({
  window: {
    createOutputChannel: () => ({
      appendLine: () => {},
    }),
    visibleTextEditors: [],
  },
  workspace: {
    getConfiguration: (section?: string) => new MockConfiguration(),
    onDidOpenTextDocument: () => new MockDisposable(),
    textDocuments: [],
    createFileSystemWatcher: (pattern: string | MockRelativePattern) => new MockFileSystemWatcher(),
    findFiles: async (include: string, exclude?: string, maxResults?: number, token?: any) => {
      return []; // Mock implementation
    },
  },
  Uri: MockUri,
  TestTag: MockTestTag,
  Position: MockPosition,
  Range: MockRange,
  Location: MockLocation,
  TestMessage: MockTestMessage,
  MarkdownString: MockMarkdownString,
  TestRunProfileKind: MockTestRunProfileKind,
  RelativePattern: MockRelativePattern,
  debug: {
    addBreakpoints: () => {},
    startDebugging: async () => true,
  },
}));

export function makeTestController(): MockTestController {
  return new MockTestController("test-controller", "Test Controller");
}

export function makeWorkspaceFolder(path: string): MockWorkspaceFolder {
  return new MockWorkspaceFolder(MockUri.file(path), path.split("/").pop() || "workspace", 0);
}
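For reference, a short sketch of how these helpers might be consumed from a bun:test suite. The importing file name ("./vscode.mock") and the test bodies are hypothetical; the key constraint is that mock.module("vscode", ...) must run before the module under test imports "vscode":

// Hypothetical usage sketch of the mock preload and factory helpers.
import { describe, expect, test } from "bun:test";
import { makeTestController, makeWorkspaceFolder } from "./vscode.mock"; // assumed file name
import { MockTestRunProfileKind } from "./vscode-types.mock";

describe("BunTestController (mocked vscode)", () => {
  test("creates file-level test items", () => {
    const controller = makeTestController();
    const folder = makeWorkspaceFolder("/tmp/project");

    const item = controller.createTestItem("a.test.ts", "a.test.ts");
    controller.items.add(item);

    expect(controller.items.size).toBe(1);
    expect(folder.name).toBe("project");
  });

  test("run profiles default to non-default", () => {
    const controller = makeTestController();
    const profile = controller.createRunProfile("Run", MockTestRunProfileKind.Run, () => {});
    expect(profile.isDefault).toBe(false);
  });
});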
@@ -17,7 +17,7 @@ export const debug = vscode.window.createOutputChannel("Bun - Test Runner");

export type TestNode = {
  name: string;
  type: "describe" | "test";
  type: "describe" | "test" | "it";
  line: number;
  children: TestNode[];
  parent?: TestNode;
@@ -51,15 +51,11 @@ export class BunTestController implements vscode.Disposable {
  private currentRunType: "file" | "individual" = "file";
  private requestedTestIds: Set<string> = new Set();
  private discoveredTestIds: Set<string> = new Set();
  private executedTestCount: number = 0;
  private totalTestsStarted: number = 0;

  constructor(
    private readonly testController: vscode.TestController,
    private readonly workspaceFolder: vscode.WorkspaceFolder,
    readonly isTest: boolean = false,
  ) {
    if (isTest) return;
    this.setupTestController();
    this.setupWatchers();
    this.setupOpenDocumentListener();
@@ -71,7 +67,10 @@ export class BunTestController implements vscode.Disposable {
    try {
      this.signal = await this.createSignal();
      await this.signal.ready;
      debug.appendLine(`Signal initialized at: ${this.signal.url}`);

      this.signal.on("Signal.Socket.connect", (socket: net.Socket) => {
        debug.appendLine("Bun connected to signal socket");
        this.handleSocketConnection(socket, this.currentRun!);
      });

@@ -90,9 +89,8 @@ export class BunTestController implements vscode.Disposable {
    };

    this.testController.refreshHandler = async token => {
      const files = await this.discoverInitialTests(token, false);
      const files = await this.discoverInitialTests(token);
      if (!files?.length) return;
      if (token.isCancellationRequested) return;

      const filePaths = new Set(files.map(f => f.fsPath));
      for (const [, testItem] of this.testController.items) {
@@ -136,21 +134,15 @@ export class BunTestController implements vscode.Disposable {
  }

  private isTestFile(document: vscode.TextDocument): boolean {
    return (
      document?.uri?.scheme === "file" && /\.(test|spec)\.(js|jsx|ts|tsx|cjs|mjs|mts|cts)$/.test(document.uri.fsPath)
    );
    return document?.uri?.scheme === "file" && /\.(test|spec)\.(js|jsx|ts|tsx|cjs|mts)$/.test(document.uri.fsPath);
  }

  private async discoverInitialTests(
    cancellationToken?: vscode.CancellationToken,
    reset: boolean = true,
  ): Promise<vscode.Uri[] | undefined> {
  private async discoverInitialTests(cancellationToken?: vscode.CancellationToken): Promise<vscode.Uri[] | undefined> {
    try {
      const tests = await this.findTestFiles(cancellationToken);
      this.createFileTestItems(tests, reset);
      this.createFileTestItems(tests);
      return tests;
    } catch (error) {
      debug.appendLine(`Error in discoverInitialTests: ${error}`);
    } catch {
      return undefined;
    }
  }
@@ -187,8 +179,6 @@ export class BunTestController implements vscode.Disposable {
    const ignoreGlobs = new Set(["**/node_modules/**"]);

    for (const ignore of ignores) {
      if (cancellationToken?.isCancellationRequested) return [];

      try {
        const content = await fs.readFile(ignore.fsPath, { encoding: "utf8" });
        const lines = content
@@ -205,15 +195,13 @@ export class BunTestController implements vscode.Disposable {
            ignoreGlobs.add(path.join(cwd.trim(), line.trim()));
          }
        }
      } catch {
        debug.appendLine(`Error in buildIgnoreGlobs: ${ignore.fsPath}`);
      }
      } catch {}
    }

    return [...ignoreGlobs.values()];
  }

  private createFileTestItems(files: vscode.Uri[], reset: boolean = true): void {
  private createFileTestItems(files: vscode.Uri[]): void {
    if (files.length === 0) {
      return;
    }
@@ -226,9 +214,7 @@ export class BunTestController implements vscode.Disposable {
        path.relative(this.workspaceFolder.uri.fsPath, file.fsPath) || file.fsPath,
        file,
      );
      if (reset) {
        fileTestItem.children.replace([]);
      }
      fileTestItem.children.replace([]);
      fileTestItem.canResolveChildren = true;
      this.testController.items.add(fileTestItem);
    }
@@ -288,13 +274,7 @@ export class BunTestController implements vscode.Disposable {
    return { bunCommand, testArgs };
  }

  private async discoverTests(
    testItem?: vscode.TestItem | false,
    filePath?: string,
    cancellationToken?: vscode.CancellationToken,
  ): Promise<void> {
    if (cancellationToken?.isCancellationRequested) return;

  private async discoverTests(testItem?: vscode.TestItem | false, filePath?: string): Promise<void> {
    let targetPath = filePath;
    if (!targetPath && testItem) {
      targetPath = testItem?.uri?.fsPath || this.workspaceFolder.uri.fsPath;
@@ -317,24 +297,17 @@ export class BunTestController implements vscode.Disposable {
      );
      this.testController.items.add(fileTestItem);
    }
    if (!this.currentRun) {
      fileTestItem.children.replace([]);
    }
    fileTestItem.children.replace([]);
    fileTestItem.canResolveChildren = false;

    this.addTestNodes(testNodes, fileTestItem, targetPath);
    } catch {
      debug.appendLine(`Error in discoverTests: ${targetPath}`);
    }
    } catch {}
  }

  private parseTestBlocks(fileContent: string): TestNode[] {
    const cleanContent = fileContent
      .replace(/\/\*[\s\S]*?\*\//g, match => match.replace(/[^\n\r]/g, " "))
      .replace(/('(?:[^'\\]|\\.)*'|"(?:[^"\\]|\\.)*"|`(?:[^`\\]|\\.)*`)|\/\/.*$/gm, (match, str) => {
        if (str) return str;
        return " ".repeat(match.length);
      });
      .replace(/\/\/.*$/gm, match => " ".repeat(match.length));

    const testRegex =
      /\b(describe|test|it)(?:\.(?:skip|todo|failing|only))?(?:\.(?:if|todoIf|skipIf)\s*\([^)]*\))?(?:\.each\s*\([^)]*\))?\s*\(\s*(['"`])((?:\\\2|.)*?)\2\s*(?:,|\))/g;
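To make the regex above concrete, here is a small demonstration of what it captures against an invented source snippet; group 1 is the callee (describe/test/it) and group 3 is the test name:

// Illustrative only: running the test-declaration regex over sample source.
const testRegex =
  /\b(describe|test|it)(?:\.(?:skip|todo|failing|only))?(?:\.(?:if|todoIf|skipIf)\s*\([^)]*\))?(?:\.each\s*\([^)]*\))?\s*\(\s*(['"`])((?:\\\2|.)*?)\2\s*(?:,|\))/g;

const source = `
describe("math", () => {
  test.each([1, 2])("adds %i", n => {});
  it.skip("is skipped", () => {});
});
`;

for (const m of source.matchAll(testRegex)) {
  console.log(m[1], JSON.stringify(m[3]));
}
// Expected output (roughly):
// describe "math"
// test "adds %i"
// it "is skipped"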
@@ -346,7 +319,6 @@
    match = testRegex.exec(cleanContent);
    while (match !== null) {
      const [full, type, , name] = match;
      const _type = type === "it" ? "test" : type;
      const line = cleanContent.slice(0, match.index).split("\n").length - 1;

      while (
@@ -357,14 +329,7 @@
        stack.pop();
      }

      const expandedNodes = this.expandEachTests(
        full,
        name,
        cleanContent,
        match.index,
        _type as TestNode["type"],
        line,
      );
      const expandedNodes = this.expandEachTests(full, name, cleanContent, match.index, type as TestNode["type"], line);

      for (const node of expandedNodes) {
        if (stack.length === 0) {
@@ -468,16 +433,16 @@
      throw new Error("Not an array");
    }

    return eachValues.map((val, testIndex) => {
      let testName = name.replace(/%%/g, "%").replace(/%#/g, (testIndex + 1).toString());
    return eachValues.map(val => {
      let testName = name;
      if (Array.isArray(val)) {
        let idx = 0;
        testName = testName.replace(/%[isfdojp#%]/g, () => {
        testName = testName.replace(/%[isfd]/g, () => {
          const v = val[idx++];
          return typeof v === "object" ? JSON.stringify(v) : String(v);
        });
      } else {
        testName = testName.replace(/%[isfdojp#%]/g, () => {
        testName = testName.replace(/%[isfd]/g, () => {
          return typeof val === "object" ? JSON.stringify(val) : String(val);
        });
      }
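One side of this hunk pre-processes `%%` (literal percent) and `%#` (1-based case index) before substituting per-value placeholders. A hedged sketch of that fuller expansion, simplified to array-valued cases (this is an illustration of the intent, not the exact production code):

// Sketch: how .each display names are expected to expand under the
// fuller placeholder handling (arrays only, other cases omitted).
function expandNames(name: string, eachValues: unknown[][]): string[] {
  return eachValues.map((val, testIndex) => {
    const seeded = name.replace(/%%/g, "%").replace(/%#/g, (testIndex + 1).toString());
    let idx = 0;
    return seeded.replace(/%[isfdojp]/g, () => {
      const v = val[idx++];
      return typeof v === "object" ? JSON.stringify(v) : String(v);
    });
  });
}

// expandNames("key %s -> %i (case %#)", [["a", 1], ["b", 2]])
// => ["key a -> 1 (case 1)", "key b -> 2 (case 2)"]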
@@ -510,22 +475,19 @@
      : this.escapeTestName(node.name);
    const testId = `${filePath}#${nodePath}`;

    let testItem = parent.children.get(testId);
    if (!testItem) {
      testItem = this.testController.createTestItem(testId, this.stripAnsi(node.name), vscode.Uri.file(filePath));
    const testItem = this.testController.createTestItem(testId, this.stripAnsi(node.name), vscode.Uri.file(filePath));

      if (node.type) testItem.tags = [new vscode.TestTag(node.type)];
    testItem.tags = [new vscode.TestTag(node.type === "describe" ? "describe" : "test")];

      if (typeof node.line === "number") {
        testItem.range = new vscode.Range(
          new vscode.Position(node.line, 0),
          new vscode.Position(node.line, node.name.length),
        );
      }

      parent.children.add(testItem);
    if (typeof node.line === "number") {
      testItem.range = new vscode.Range(
        new vscode.Position(node.line, 0),
        new vscode.Position(node.line, node.name.length),
      );
    }

    parent.children.add(testItem);

    if (node.children.length > 0) {
      this.addTestNodes(node.children, testItem, filePath, nodePath);
    }
@@ -538,7 +500,7 @@
  }

  private escapeTestName(source: string): string {
    return source.replace(/[^\w \-\u0080-\uFFFF]/g, "\\$&");
    return source.replace(/[^a-zA-Z0-9_\ ]/g, "\\$&");
  }

  private async createSignal(): Promise<UnixSignal | TCPSocketSignal> {
@@ -555,23 +517,6 @@
    token: vscode.CancellationToken,
    isDebug: boolean,
  ): Promise<void> {
    if (this.currentRun) {
      this.closeAllActiveProcesses();
      this.disconnectInspector();
      if (this.currentRun) {
        this.currentRun.appendOutput("\n\x1b[33mCancelled: Starting new test run\x1b[0m\n");
        this.currentRun.end();
        this.currentRun = null;
      }
    }
    this.totalTestsStarted++;
    if (this.totalTestsStarted > 15) {
      this.closeAllActiveProcesses();
      this.disconnectInspector();
      this.signal?.close();
      this.signal = null;
    }

    const run = this.testController.createTestRun(request);

    token.onCancellationRequested(() => {
@@ -580,14 +525,6 @@
      this.disconnectInspector();
    });

    if ("onDidDispose" in run) {
      (run.onDidDispose as vscode.Event<void>)(() => {
        run?.end?.();
        this.closeAllActiveProcesses();
        this.disconnectInspector();
      });
    }

    const queue: vscode.TestItem[] = [];

    if (request.include) {
@@ -610,9 +547,7 @@
      await this.runTestsWithInspector(queue, run, token);
    } catch (error) {
      for (const test of queue) {
        const msg = new vscode.TestMessage(`Error: ${error}`);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
        run.errored(test, new vscode.TestMessage(`Error: ${error}`));
      }
    } finally {
      run.end();
@@ -622,11 +557,8 @@
  private async runTestsWithInspector(
    tests: vscode.TestItem[],
    run: vscode.TestRun,
    token: vscode.CancellationToken,
    _token: vscode.CancellationToken,
  ): Promise<void> {
    const time = performance.now();
    if (token.isCancellationRequested) return;

    this.disconnectInspector();

    const allFiles = new Set<string>();
@@ -637,20 +569,13 @@
    }

    if (allFiles.size === 0) {
      const errorMsg = "No test files found to run.";
      run.appendOutput(`\x1b[31mError: ${errorMsg}\x1b[0m\n`);
      for (const test of tests) {
        const msg = new vscode.TestMessage(errorMsg);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
      }
      throw new Error(errorMsg);
      run.appendOutput("No test files found to run.\n");
      return;
    }

    for (const test of tests) {
      if (token.isCancellationRequested) return;
      if (test.uri && test.canResolveChildren) {
        await this.discoverTests(test, undefined, token);
        await this.discoverTests(test);
      }
    }

@@ -659,7 +584,6 @@

    this.requestedTestIds.clear();
    this.discoveredTestIds.clear();
    this.executedTestCount = 0;
    for (const test of tests) {
      this.requestedTestIds.add(test.id);
    }
@@ -683,38 +607,21 @@
        resolve();
      };

      const handleCancel = () => {
        clearTimeout(timeout);
        this.signal!.off("Signal.Socket.connect", handleConnect);
        reject(new Error("Test run cancelled"));
      };
      token.onCancellationRequested(handleCancel);

      this.signal!.once("Signal.Socket.connect", handleConnect);
    });

    const { bunCommand, testArgs } = this.getBunExecutionConfig();

    let args = [...testArgs, ...allFiles];
    let printedArgs = `\x1b[34;1m>\x1b[0m \x1b[34;1m${bunCommand} ${testArgs.join(" ")}\x1b[2m`;

    for (const file of allFiles) {
      const f = path.relative(this.workspaceFolder.uri.fsPath, file) || file;
      if (f.includes(" ")) {
        printedArgs += ` ".${path.sep}${f}"`;
      } else {
        printedArgs += ` .${path.sep}${f}`;
      }
    }
    let args = [...testArgs, ...Array.from(allFiles)];

    if (isIndividualTestRun) {
      const pattern = this.buildTestNamePattern(tests);
      if (pattern) {
        args.push("--test-name-pattern", pattern);
        printedArgs += `\x1b[0m\x1b[2m --test-name-pattern "${pattern}"\x1b[0m`;
        args.push("--test-name-pattern", process.platform === "win32" ? `"${pattern}"` : pattern);
      }
    }
    run.appendOutput(printedArgs + "\x1b[0m\r\n\r\n");

    run.appendOutput(`\r\n\x1b[34m>\x1b[0m \x1b[2m${bunCommand} ${args.join(" ")}\x1b[0m\r\n\r\n`);
    args.push(`--inspect-wait=${this.signal!.url}`);

    for (const test of tests) {
      if (isIndividualTestRun || tests.length === 1) {
@@ -724,52 +631,34 @@
      }
    }

    let inspectorUrl: string | undefined =
      this.signal.url.startsWith("ws") || this.signal.url.startsWith("tcp")
        ? `${this.signal!.url}?wait=1`
        : `${this.signal!.url}`;

    // right now there isn't a way to tell the socket method to wait for the connection
    if (!inspectorUrl?.includes("?wait=1")) {
      args.push(`--inspect-wait=${this.signal!.url}`);
      inspectorUrl = undefined;
    }

    const proc = spawn(bunCommand, args, {
      cwd: this.workspaceFolder.uri.fsPath,
      env: {
        ...process.env,
        BUN_DEBUG_QUIET_LOGS: "1",
        FORCE_COLOR: "1",
        BUN_INSPECT: inspectorUrl,
        ...process.env,
        NO_COLOR: "0",
      },
    });

    this.activeProcesses.add(proc);

    let stdout = "";

    proc.on("exit", (code, signal) => {
      if (code !== 0 && code !== 1) {
        debug.appendLine(`Test process failed: exit ${code}, signal ${signal}`);
      }
      debug.appendLine(`Process exited with code ${code}, signal ${signal}`);
    });

    proc.on("error", error => {
      stdout += `Process error: ${error.message}\n`;
      debug.appendLine(`Process error: ${error.message}`);
    });

    proc.stdout?.on("data", data => {
      const dataStr = data.toString();
      stdout += dataStr;
      const formattedOutput = dataStr.replace(/\n/g, "\r\n");
      run.appendOutput(formattedOutput);
    });

    proc.stderr?.on("data", data => {
      const dataStr = data.toString();
      stdout += dataStr;
      const formattedOutput = dataStr.replace(/\n/g, "\r\n");
      run.appendOutput(formattedOutput);
    });
@@ -777,57 +666,35 @@
    try {
      await socketPromise;
    } catch (error) {
      debug.appendLine(`Connection failed: ${error} (URL: ${this.signal!.url})`);
      debug.appendLine(`Failed to establish inspector connection: ${error}`);
      debug.appendLine(`Signal URL was: ${this.signal!.url}`);
      debug.appendLine(`Command was: ${bunCommand} ${args.join(" ")}`);
      throw error;
    }

    await new Promise<void>((resolve, reject) => {
      const handleClose = (code: number | null) => {
      proc.on("close", code => {
        this.activeProcesses.delete(proc);
        if (code === 0 || code === 1) {
          resolve();
        } else {
          reject(new Error(`Process exited with code ${code}. Please check the console for more details.`));
          reject(new Error(`Process exited with code ${code}`));
        }
      };
      });

      const handleError = (error: Error) => {
      proc.on("error", error => {
        this.activeProcesses.delete(proc);
        reject(error);
      };

      const handleCancel = () => {
        proc.kill("SIGTERM");
        this.activeProcesses.delete(proc);
        reject(new Error("Test run cancelled"));
      };

      proc.on("close", handleClose);
      proc.on("error", handleError);

      token.onCancellationRequested(handleCancel);
      });
    }).finally(() => {
      if (this.discoveredTestIds.size === 0) {
        const errorMsg =
          "No tests were executed. This could mean:\r\n- All tests were filtered out\r\n- The test runner crashed before running tests\r\n- No tests match the pattern";
        run.appendOutput(`\n\x1b[31m\x1b[1mError:\x1b[0m\x1b[31m ${errorMsg}\x1b[0m\n`);

        for (const test of tests) {
          if (!this.testResultHistory.has(test.id)) {
            const msg = new vscode.TestMessage(errorMsg + "\n\n----------\n" + stdout + "\n----------\n");
            msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
            run.errored(test, msg);
          }
        }
        if (isIndividualTestRun) {
          this.applyPreviousResults(tests, run);
        }

      if (this.discoveredTestIds.size > 0 && this.executedTestCount > 0) {
        if (isIndividualTestRun) {
          this.applyPreviousResults(tests, run);
          this.cleanupUndiscoveredTests(tests);
        } else {
          this.cleanupStaleTests(tests);
        }
      if (isIndividualTestRun) {
        this.cleanupUndiscoveredTests(tests);
      } else {
        this.cleanupStaleTests(tests);
      }

      if (this.activeProcesses.has(proc)) {
@@ -837,7 +704,6 @@

      this.disconnectInspector();
      this.currentRun = null;
      debug.appendLine(`Test run completed in ${performance.now() - time}ms`);
    });
  }

@@ -859,7 +725,7 @@
        run.passed(item, previousResult.duration);
        break;
      case "failed":
        run.failed(item, [], previousResult.duration);
        run.failed(item, previousResult.message || new vscode.TestMessage("Test failed"), previousResult.duration);
        break;
      case "skipped":
        run.skipped(item);
@@ -897,11 +763,16 @@
      this.handleLifecycleError(event, run);
    });

    this.debugAdapter.on("Inspector.event", e => {
      debug.appendLine(`Received inspector event: ${e.method}`);
    });

    this.debugAdapter.on("Inspector.error", e => {
      debug.appendLine(`Inspector error: ${e}`);
    });

    socket.on("close", () => {
      debug.appendLine("Inspector connection closed");
      this.debugAdapter = null;
    });

@@ -928,6 +799,7 @@
    const { id: inspectorTestId, url: sourceURL, name, type, parentId, line } = params;

    if (!sourceURL) {
      debug.appendLine(`Warning: Test found without URL: ${name}`);
      return;
    }

@@ -942,6 +814,8 @@
      this.inspectorToVSCode.set(inspectorTestId, testItem);
      this.vscodeToInspector.set(testItem.id, inspectorTestId);
      this.discoveredTestIds.add(testItem.id);
    } else {
      debug.appendLine(`Could not find VS Code test item for: ${name} in ${path.basename(filePath)}`);
    }
  }

@@ -1057,7 +931,6 @@
    if (!testItem) return;

    const duration = elapsed / 1000000;
    this.executedTestCount++;

    if (
      this.currentRunType === "individual" &&
@@ -1086,6 +959,7 @@
        break;
      case "skip":
      case "todo":
      case "skipped_because_label":
        run.skipped(testItem);
        this.testResultHistory.set(testItem.id, { status: "skipped" });
        break;
@@ -1096,8 +970,6 @@
        run.failed(testItem, timeoutMsg, duration);
        this.testResultHistory.set(testItem.id, { status: "failed", message: timeoutMsg, duration });
        break;
      case "skipped_because_label":
        break;
    }
  }

@@ -1206,10 +1078,7 @@
    const lines = messageLinesRaw;

    const errorLine = lines[0].trim();
    const messageLines = lines
      .slice(1)
      .filter(line => line.trim())
      .join("\n");
    const messageLines = lines.slice(1).join("\n");

    const errorType = errorLine.replace(/^(E|e)rror: /, "").trim();

@@ -1221,8 +1090,8 @@
    const regex = /^Expected:\s*([\s\S]*?)\nReceived:\s*([\s\S]*?)$/;
    let testMessage = vscode.TestMessage.diff(
      errorLine,
      messageLines.trim().match(regex)?.[1].trim() || "",
      messageLines.trim().match(regex)?.[2].trim() || "",
      messageLines.match(regex)?.[1].trim() || "",
      messageLines.match(regex)?.[2].trim() || "",
    );
    if (!messageLines.match(regex)) {
      const code = messageLines
@@ -1284,7 +1153,7 @@
      lastEffortMsg = lastEffortMsg.reverse();
    }

    const msg = errorType.startsWith("expect")
    const msg = errorLine.startsWith("error: expect")
      ? `${lastEffortMsg.join("\n")}\n${errorLine.trim()}`.trim()
      : `${errorLine.trim()}\n${messageLines}`.trim();

@@ -1332,15 +1201,12 @@

    t = t.replaceAll(/\$\{[^}]+\}/g, ".*?");
    t = t.replaceAll(/\\\$\\\{[^}]+\\\}/g, ".*?");
    t = t.replaceAll(/\\%[isfdojp#%]|(\\%)|(\\#)/g, ".*?");
    t = t.replaceAll(/\$[\w\.\[\]]+/g, ".*?");
    t = t.replaceAll(/\\%[isfd]/g, ".*?");

    if (test?.tags?.some(tag => tag.id === "test" || tag.id === "it")) {
    if (test.tags.some(tag => tag.id === "test" || tag.id === "it")) {
      testNames.push(`^ ${t}$`);
    } else if (test?.tags?.some(tag => tag.id === "describe")) {
      testNames.push(`^ ${t} `);
    } else {
      testNames.push(t);
      testNames.push(`^ ${t} `);
    }
  }
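A rough walkthrough of the pattern this produces, with the escaping and placeholder wildcarding simplified from the code above (names invented for illustration):

// Sketch: a test tagged "test" named "adds ${n}" and a suite tagged
// "describe" named "math", after escapeTestName and wildcarding.
let t = "adds \\$\\{n\\}";                     // escaped test name
t = t.replaceAll(/\\\$\\\{[^}]+\\\}/g, ".*?"); // => "adds .*?"
// tagged "test"     -> "^ adds .*?$"  (must match the full test name)
// tagged "describe" -> "^ math "      (prefix-match everything inside the suite)
// The per-item patterns are then joined into one --test-name-pattern regex.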
@@ -1376,13 +1242,7 @@
    const isIndividualTestRun = this.shouldUseTestNamePattern(tests);

    if (testFiles.size === 0) {
      const errorMsg = "No test files found to debug.";
      run.appendOutput(`\x1b[31mError: ${errorMsg}\x1b[0m\n`);
      for (const test of tests) {
        const msg = new vscode.TestMessage(errorMsg);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
      }
      run.appendOutput("No test files found to debug.\n");
      run.end();
      return;
    }
@@ -1408,7 +1268,7 @@

      const pattern = this.buildTestNamePattern(tests);
      if (pattern) {
        args.push("--test-name-pattern", pattern);
        args.push("--test-name-pattern", process.platform === "win32" ? `"${pattern}"` : pattern);
      }
    }

@@ -1429,12 +1289,9 @@
      if (!res) throw new Error("Failed to start debugging session");
    } catch (error) {
      for (const test of tests) {
        const msg = new vscode.TestMessage(`Error starting debugger: ${error}`);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
        run.errored(test, new vscode.TestMessage(`Error starting debugger: ${error}`));
      }
    }
    run.appendOutput("\n\x1b[33mDebug session started. Please open the debug console to see its output.\x1b[0m\r\n");
    run.end();
  }

@@ -1461,32 +1318,6 @@
    }
    this.disposables = [];
  }

  // a sus way to expose internal functions to the test suite
  public get _internal() {
    return {
      expandEachTests: this.expandEachTests.bind(this),
      parseTestBlocks: this.parseTestBlocks.bind(this),
      getBraceDepth: this.getBraceDepth.bind(this),

      buildTestNamePattern: this.buildTestNamePattern.bind(this),
      stripAnsi: this.stripAnsi.bind(this),
      processErrorData: this.processErrorData.bind(this),
      escapeTestName: this.escapeTestName.bind(this),
      shouldUseTestNamePattern: this.shouldUseTestNamePattern.bind(this),

      isTestFile: this.isTestFile.bind(this),
      customFilePattern: this.customFilePattern.bind(this),
      getBunExecutionConfig: this.getBunExecutionConfig.bind(this),

      findTestByPath: this.findTestByPath.bind(this),
      findTestByName: this.findTestByName.bind(this),
      createTestItem: this.createTestItem.bind(this),

      createErrorMessage: this.createErrorMessage.bind(this),
      cleanupTestItem: this.cleanupTestItem.bind(this),
    };
  }
}

function windowsVscodeUri(uri: string): string {

@@ -7,14 +7,8 @@ export async function registerTests(context: vscode.ExtensionContext) {
    return;
  }

  const config = vscode.workspace.getConfiguration("bun.test");
  const enable = config.get<boolean>("enable", true);
  if (!enable) {
    return;
  }

  try {
    const controller = vscode.tests.createTestController("bun", "Bun Tests");
    const controller = vscode.tests.createTestController("bun-tests", "Bun Tests");
    context.subscriptions.push(controller);

    const bunTestController = new BunTestController(controller, workspaceFolder);

0
packages/bun-vscode/src/features/tests/types.ts
Normal file
@@ -2,8 +2,8 @@
+++ CMakeLists.txt
@@ -1,5 +1,5 @@
#
-cmake_minimum_required(VERSION 3.17 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.17...3.30 FATAL_ERROR)

PROJECT(libarchive C)
#
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12 FATAL_ERROR)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12...3.5 FATAL_ERROR)
if(POLICY CMP0065)
  cmake_policy(SET CMP0065 NEW) #3.4 don't use `-rdynamic` with executables
endif()

@@ -1,29 +1,22 @@
--- a/libarchive/archive_write_add_filter_gzip.c	2025-07-21 06:29:58.505101515 +0000
+++ b/libarchive/archive_write_add_filter_gzip.c	2025-07-21 06:44:09.023676935 +0000
@@ -59,12 +59,13 @@
--- a/libarchive/archive_write_add_filter_gzip.c
+++ b/libarchive/archive_write_add_filter_gzip.c
@@ -58,6 +58,7 @@ archive_write_set_compression_gzip(struct archive *a)
struct private_data {
  int compression_level;
  int timestamp;
  char *original_filename;
+ unsigned char os;
+ unsigned char os;
#ifdef HAVE_ZLIB_H
  z_stream stream;
  int64_t total_in;
  unsigned char *compressed;
  size_t compressed_buffer_size;
- unsigned long crc;
+ uint32_t crc;
#else
  struct archive_write_program_data *pdata;
#endif
@@ -108,6 +109,7 @@
@@ -106,6 +107,7 @@ archive_write_add_filter_gzip(struct archive *_a)
  archive_set_error(&a->archive, ENOMEM, "Out of memory");
  return (ARCHIVE_FATAL);
}
f->data = data;
+ data->os = 3; /* default Unix */
f->data = data;
f->open = &archive_compressor_gzip_open;
f->options = &archive_compressor_gzip_options;
f->close = &archive_compressor_gzip_close;
@@ -177,6 +179,30 @@
@@ -166,6 +168,30 @@ archive_compressor_gzip_options(struct archive_write_filter *f, const char *key,
  return (ARCHIVE_OK);
}

@@ -54,7 +47,7 @@
/* Note: The "warn" return is just to inform the options
 * supervisor that we didn't handle it. It will generate
 * a suitable error if no one used this option. */
@@ -236,7 +262,7 @@
@@ -226,7 +252,7 @@ archive_compressor_gzip_open(struct archive_write_filter *f)
  data->compressed[8] = 4;
else
  data->compressed[8] = 0;

@@ -1,4 +1,4 @@
# Version: 11
# Version: 10
# A script that installs the dependencies needed to build and test Bun.
# This should work on Windows 10 or newer with PowerShell.

@@ -282,8 +282,6 @@ function Install-Build-Essentials {
    strawberryperl `
    mingw
  Install-Rust
  # Needed to remap stack traces
  Install-PdbAddr2line
  Install-Llvm
}

@@ -344,10 +342,6 @@ function Install-Rust {
  Add-To-Path "$rustPath\cargo\bin"
}

function Install-PdbAddr2line {
  Execute-Command cargo install --examples "pdb-addr2line@0.11.2"
}

function Install-Llvm {
  Install-Package llvm `
    -Command clang-cl `

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 18
# Version: 17

# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -1521,9 +1521,6 @@ configure_core_dumps() {
    if [ -d /sbin ]; then
      append_to_path /sbin
    fi

    # install gdb for backtraces
    install_packages gdb
    ;;
  esac
}

@@ -224,31 +224,6 @@ async function spawn(command, args, options, label) {
    ...options,
  });

  let killedManually = false;

  function onKill() {
    clearOnKill();
    if (!subprocess.killed) {
      killedManually = true;
      subprocess.kill?.();
    }
  }

  function clearOnKill() {
    process.off("beforeExit", onKill);
    process.off("SIGINT", onKill);
    process.off("SIGTERM", onKill);
  }

  // Kill the entire process tree so everything gets cleaned up. On Windows, job
  // control groups make this happen automatically, so we don't need to do this
  // on Windows.
  if (process.platform !== "win32") {
    process.once("beforeExit", onKill);
    process.once("SIGINT", onKill);
    process.once("SIGTERM", onKill);
  }

  let timestamp;
  subprocess.on("spawn", () => {
    timestamp = Date.now();
@@ -278,14 +253,8 @@ async function spawn(command, args, options, label) {
  }

  const { error, exitCode, signalCode } = await new Promise(resolve => {
    subprocess.on("error", error => {
      clearOnKill();
      resolve({ error });
    });
    subprocess.on("exit", (exitCode, signalCode) => {
      clearOnKill();
      resolve({ exitCode, signalCode });
    });
    subprocess.on("error", error => resolve({ error }));
    subprocess.on("exit", (exitCode, signalCode) => resolve({ exitCode, signalCode }));
  });

  if (done) {
@@ -332,9 +301,7 @@ async function spawn(command, args, options, label) {
  }

  if (signalCode) {
    if (!killedManually) {
      console.error(`Command killed: ${signalCode}`);
    }
    console.error(`Command killed: ${signalCode}`);
  } else {
    console.error(`Command exited: code ${exitCode}`);
  }

@@ -44,21 +44,17 @@ if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${
  await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`;

  console.log("moving cores out of nested directory");
  for await (const file of new Bun.Glob("bun-cores-*/*.core").scan(dir)) {
  for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) {
    fs.renameSync(join(dir, file), join(dir, basename(file)));
  }
} else {
  console.log(`already downloaded in ${dir}`);
}

const desiredCore = join(dir, (await new Bun.Glob(`*${pid}.core`).scan(dir).next()).value);

const args = [debuggerPath, "--core", desiredCore, join(dir, "bun-profile")];

console.log("launching debugger:");
console.log(args.map(Bun.$.escape).join(" "));
console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`);

const proc = Bun.spawn(args, {
const proc = await Bun.spawn([debuggerPath, "--core", join(dir, `bun-${pid}.core`), join(dir, "bun-profile")], {
  stdin: "inherit",
  stdout: "inherit",
  stderr: "inherit",

@@ -29,27 +29,6 @@ const formatTime = (ms: number): string => {
const start = Date.now();
let totalTimeEstimate = -1;

function report() {
  process.stdout.write("\n");
  const attemptsReached =
    numOk + numTimedOut + signals.values().reduce((a, b) => a + b, 0) + codes.values().reduce((a, b) => a + b, 0);

  green(`${pad(numOk)}/${attemptsReached} OK`);
  if (numTimedOut > 0) {
    red(`${pad(numTimedOut)}/${attemptsReached} timeout`);
  }
  for (const [signal, count] of signals.entries()) {
    red(`${pad(count)}/${attemptsReached} ${signal}`);
  }
  for (const [code, count] of codes.entries()) {
    red(`${pad(count)}/${attemptsReached} code ${code}`);
  }

  process.exit(numOk === attemptsReached ? 0 : 1);
}

process.on("SIGINT", report);

for (let i = 0; i < attempts; i++) {
  const proc = Bun.spawn({
    cmd: argv,
@@ -96,5 +75,17 @@ for (let i = 0; i < attempts; i++) {
  const remaining = totalTimeEstimate - (now - start);
  process.stdout.write(`\r\x1b[2K${pad(i + 1)}/${attempts} completed, ${formatTime(remaining)} remaining`);
}
process.stdout.write("\n");

report();
green(`${pad(numOk)}/${attempts} OK`);
if (numTimedOut > 0) {
  red(`${pad(numTimedOut)}/${attempts} timeout`);
}
for (const [signal, count] of signals.entries()) {
  red(`${pad(count)}/${attempts} ${signal}`);
}
for (const [code, count] of codes.entries()) {
  red(`${pad(count)}/${attempts} code ${code}`);
}

process.exit(numOk === attempts ? 0 : 1);

@@ -1,109 +0,0 @@
/**
 * p-limit@6.2.0
 * https://github.com/sindresorhus/p-limit
 * MIT (c) Sindre Sorhus
 */

import Queue from "./yocto-queue.mjs";

export default function pLimit(concurrency) {
  validateConcurrency(concurrency);

  const queue = new Queue();
  let activeCount = 0;

  const resumeNext = () => {
    if (activeCount < concurrency && queue.size > 0) {
      queue.dequeue()();
      // Since `pendingCount` has been decreased by one, increase `activeCount` by one.
      activeCount++;
    }
  };

  const next = () => {
    activeCount--;

    resumeNext();
  };

  const run = async (function_, resolve, arguments_) => {
    const result = (async () => function_(...arguments_))();

    resolve(result);

    try {
      await result;
    } catch {}

    next();
  };

  const enqueue = (function_, resolve, arguments_) => {
    // Queue `internalResolve` instead of the `run` function
    // to preserve asynchronous context.
    new Promise(internalResolve => {
      queue.enqueue(internalResolve);
    }).then(run.bind(undefined, function_, resolve, arguments_));

    (async () => {
      // This function needs to wait until the next microtask before comparing
      // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
      // after the `internalResolve` function is dequeued and called. The comparison in the if-statement
      // needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
      await Promise.resolve();

      if (activeCount < concurrency) {
        resumeNext();
      }
    })();
  };

  const generator = (function_, ...arguments_) =>
    new Promise(resolve => {
      enqueue(function_, resolve, arguments_);
    });

  Object.defineProperties(generator, {
    activeCount: {
      get: () => activeCount,
    },
    pendingCount: {
      get: () => queue.size,
    },
    clearQueue: {
      value() {
        queue.clear();
      },
    },
    concurrency: {
      get: () => concurrency,

      set(newConcurrency) {
        validateConcurrency(newConcurrency);
        concurrency = newConcurrency;

        queueMicrotask(() => {
          // eslint-disable-next-line no-unmodified-loop-condition
          while (activeCount < concurrency && queue.size > 0) {
            resumeNext();
          }
        });
      },
    },
  });

  return generator;
}

export function limitFunction(function_, option) {
  const { concurrency } = option;
  const limit = pLimit(concurrency);

  return (...arguments_) => limit(() => function_(...arguments_));
}

function validateConcurrency(concurrency) {
  if (!((Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY) && concurrency > 0)) {
    throw new TypeError("Expected `concurrency` to be a number from 1 and up");
  }
}
|
||||
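For reference, this is how the deleted module was consumed: `pLimit(n)` returns a function that schedules at most `n` callbacks at once, queueing the rest. A minimal usage sketch (URLs are placeholders):

```js
import pLimit from "./p-limit.mjs";

// At most 2 of these run concurrently; the rest wait in the internal queue.
const limit = pLimit(2);
const urls = ["https://example.com/a", "https://example.com/b", "https://example.com/c"];
const responses = await Promise.all(urls.map(url => limit(() => fetch(url))));
console.log(limit.activeCount, limit.pendingCount); // 0 0 once everything has settled
```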
@@ -28,12 +28,9 @@ import {
  writeFileSync,
} from "node:fs";
import { readFile } from "node:fs/promises";
import { availableParallelism, userInfo } from "node:os";
import { userInfo } from "node:os";
import { basename, dirname, extname, join, relative, sep } from "node:path";
import { createInterface } from "node:readline";
import { setTimeout as setTimeoutPromise } from "node:timers/promises";
import { parseArgs } from "node:util";
import pLimit from "./p-limit.mjs";
import {
  getAbi,
  getAbiVersion,
@@ -66,7 +63,6 @@ import {
  unzip,
  uploadArtifact,
} from "./utils.mjs";

let isQuiet = false;
const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
const testsPath = join(cwd, "test");
@@ -74,6 +70,7 @@ const testsPath = join(cwd, "test");
const spawnTimeout = 5_000;
const testTimeout = 3 * 60_000;
const integrationTimeout = 5 * 60_000;
const napiTimeout = 10 * 60_000;

function getNodeParallelTestTimeout(testPath) {
  if (testPath.includes("test-dns")) {
@@ -156,10 +153,6 @@ const { values: options, positionals: filters } = parseArgs({
      type: "boolean",
      default: isBuildkite && isLinux,
    },
    ["parallel"]: {
      type: "boolean",
      default: false,
    },
  },
});

@@ -178,25 +171,6 @@ if (options["quiet"]) {
  isQuiet = true;
}

let coresDir;

if (options["coredump-upload"]) {
  // this sysctl is set in bootstrap.sh to /var/bun-cores-$distro-$release-$arch
  const sysctl = await spawnSafe({ command: "sysctl", args: ["-n", "kernel.core_pattern"] });
  coresDir = sysctl.stdout;
  if (sysctl.ok) {
    if (coresDir.startsWith("|")) {
      throw new Error("cores are being piped not saved");
    }
    // change /foo/bar/%e-%p.core to /foo/bar
    coresDir = dirname(sysctl.stdout);
  } else {
    throw new Error(`Failed to check core_pattern: ${sysctl.error}`);
  }
}

let remapPort = undefined;

/**
 * @typedef {Object} TestExpectation
 * @property {string} filename
@@ -367,10 +341,6 @@ async function runTests() {
  const failedResults = [];
  const maxAttempts = 1 + (parseInt(options["retries"]) || 0);

  const parallelism = options["parallel"] ? availableParallelism() : 1;
  console.log("parallelism", parallelism);
  const limit = pLimit(parallelism);

  /**
   * @param {string} title
   * @param {function} fn
@@ -380,21 +350,17 @@ async function runTests() {
    const index = ++i;

    let result, failure, flaky;
    let attempt = 1;
    for (; attempt <= maxAttempts; attempt++) {
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      if (attempt > 1) {
        await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
      }

      let grouptitle = `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`;
      if (attempt > 1) grouptitle += ` ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`;

      if (parallelism > 1) {
        console.log(grouptitle);
        result = await fn();
      } else {
        result = await startGroup(grouptitle, fn);
      }
      result = await startGroup(
        attempt === 1
          ? `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`
          : `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title} ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`,
        fn,
      );

      const { ok, stdoutPreview, error } = result;
      if (ok) {
@@ -409,7 +375,6 @@ async function runTests() {
      const color = attempt >= maxAttempts ? "red" : "yellow";
      const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
      startGroup(label, () => {
        if (parallelism > 1) return;
        process.stderr.write(stdoutPreview);
      });

@@ -430,15 +395,14 @@ async function runTests() {
      // Group flaky tests together, regardless of the title
      const context = flaky ? "flaky" : title;
      const style = flaky || title.startsWith("vendor") ? "warning" : "error";
      if (!flaky) attempt = 1; // no need to show the retries count on failures, we know it maxed out

      if (title.startsWith("vendor")) {
        const content = formatTestToMarkdown({ ...failure, testPath: title }, false, attempt - 1);
        const content = formatTestToMarkdown({ ...failure, testPath: title });
        if (content) {
          reportAnnotationToBuildKite({ context, label: title, content, style });
        }
      } else {
        const content = formatTestToMarkdown(failure, false, attempt - 1);
        const content = formatTestToMarkdown(failure);
        if (content) {
          reportAnnotationToBuildKite({ context, label: title, content, style });
        }
@@ -448,10 +412,10 @@ async function runTests() {
      if (isGithubAction) {
        const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
        if (summaryPath) {
          const longMarkdown = formatTestToMarkdown(failure, false, attempt - 1);
          const longMarkdown = formatTestToMarkdown(failure);
          appendFileSync(summaryPath, longMarkdown);
        }
        const shortMarkdown = formatTestToMarkdown(failure, true, attempt - 1);
        const shortMarkdown = formatTestToMarkdown(failure, true);
        appendFileSync("comment.md", shortMarkdown);
      }

@@ -470,100 +434,48 @@ async function runTests() {
  }

  if (!failedResults.length) {
  // TODO: remove windows exclusion here
  if (isCI && !isWindows) {
    // bun install has succeeded
    const { promise: portPromise, resolve: portResolve } = Promise.withResolvers();
    const { promise: errorPromise, resolve: errorResolve } = Promise.withResolvers();
    console.log("run in", cwd);
    let exiting = false;

    const server = spawn(execPath, ["run", "ci-remap-server", execPath, cwd, getCommit()], {
      stdio: ["ignore", "pipe", "inherit"],
      cwd, // run in main repo
      env: { ...process.env, BUN_DEBUG_QUIET_LOGS: "1", NO_COLOR: "1" },
    });
    server.unref();
    server.on("error", errorResolve);
    server.on("exit", (code, signal) => {
      if (!exiting && (code !== 0 || signal !== null)) errorResolve(signal ? signal : "code " + code);
    });
    process.on("exit", () => {
      exiting = true;
      server.kill();
    });
    const lines = createInterface(server.stdout);
    lines.on("line", line => {
      portResolve({ port: parseInt(line) });
    });

    const result = await Promise.race([portPromise, errorPromise, setTimeoutPromise(5000, "timeout")]);
    if (typeof result.port != "number") {
      server.kill();
      console.warn("ci-remap server did not start:", result);
  for (const testPath of tests) {
    const absoluteTestPath = join(testsPath, testPath);
    const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
    if (isNodeTest(testPath)) {
      const testContent = readFileSync(absoluteTestPath, "utf-8");
      const runWithBunTest =
        title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
      const subcommand = runWithBunTest ? "test" : "run";
      const env = {
        FORCE_COLOR: "0",
        NO_COLOR: "1",
        BUN_DEBUG_QUIET_LOGS: "1",
      };
      if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
        env.BUN_JSC_validateExceptionChecks = "1";
      }
      await runTest(title, async () => {
        const { ok, error, stdout } = await spawnBun(execPath, {
          cwd: cwd,
          args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
          timeout: getNodeParallelTestTimeout(title),
          env,
          stdout: chunk => pipeTestStdout(process.stdout, chunk),
          stderr: chunk => pipeTestStdout(process.stderr, chunk),
        });
        const mb = 1024 ** 3;
        const stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
        return {
          testPath: title,
          ok: ok,
          status: ok ? "pass" : "fail",
          error: error,
          errors: [],
          tests: [],
          stdout: stdout,
          stdoutPreview: stdoutPreview,
        };
      });
    } else {
      console.log("crash reports parsed on port", result.port);
      remapPort = result.port;
      await runTest(title, async () => spawnBunTest(execPath, join("test", testPath)));
    }
  }

  await Promise.all(
    tests.map(testPath =>
      limit(() => {
        const absoluteTestPath = join(testsPath, testPath);
        const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
        if (isNodeTest(testPath)) {
          const testContent = readFileSync(absoluteTestPath, "utf-8");
          const runWithBunTest =
            title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
          const subcommand = runWithBunTest ? "test" : "run";
          const env = {
            FORCE_COLOR: "0",
            NO_COLOR: "1",
            BUN_DEBUG_QUIET_LOGS: "1",
          };
          if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
            env.BUN_JSC_validateExceptionChecks = "1";
          }
          return runTest(title, async () => {
            const { ok, error, stdout, crashes } = await spawnBun(execPath, {
              cwd: cwd,
              args: [
                subcommand,
                "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"),
                absoluteTestPath,
              ],
              timeout: getNodeParallelTestTimeout(title),
              env,
              stdout: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stdout, chunk),
              stderr: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stderr, chunk),
            });
            const mb = 1024 ** 3;
            let stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
            if (crashes) stdoutPreview += crashes;
            return {
              testPath: title,
              ok: ok,
              status: ok ? "pass" : "fail",
              error: error,
              errors: [],
              tests: [],
              stdout: stdout,
              stdoutPreview: stdoutPreview,
            };
          });
        } else {
          return runTest(title, async () =>
            spawnBunTest(execPath, join("test", testPath), {
              cwd,
              stdout: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stdout, chunk),
              stderr: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stderr, chunk),
            }),
          );
        }
      }),
    ),
  );
}

if (vendorTests?.length) {
@@ -605,7 +517,7 @@ async function runTests() {

  if (isGithubAction) {
    reportOutputToGitHubAction("failing_tests_count", failedResults.length);
    const markdown = formatTestToMarkdown(failedResults, false, 0);
    const markdown = formatTestToMarkdown(failedResults);
    reportOutputToGitHubAction("failing_tests", markdown);
  }

@@ -700,6 +612,19 @@ async function runTests() {

  if (options["coredump-upload"]) {
    try {
      // this sysctl is set in bootstrap.sh to /var/bun-cores-$distro-$release-$arch
      const sysctl = await spawnSafe({ command: "sysctl", args: ["-n", "kernel.core_pattern"] });
      let coresDir = sysctl.stdout;
      if (sysctl.ok) {
        if (coresDir.startsWith("|")) {
          throw new Error("cores are being piped not saved");
        }
        // change /foo/bar/%e-%p.core to /foo/bar
        coresDir = dirname(sysctl.stdout);
      } else {
        throw new Error(`Failed to check core_pattern: ${sysctl.error}`);
      }

      const coresDirBase = dirname(coresDir);
      const coresDirName = basename(coresDir);
      const coreFileNames = readdirSync(coresDir);
@@ -805,7 +730,6 @@ async function runTests() {
 * @property {number} timestamp
 * @property {number} duration
 * @property {string} stdout
 * @property {number} [pid]
 */

/**
@@ -856,25 +780,6 @@ async function spawnSafe(options) {
  };
  await new Promise(resolve => {
    try {
      function unsafeBashEscape(str) {
        if (!str) return "";
        if (str.includes(" ")) return JSON.stringify(str);
        return str;
      }
      if (process.env.SHOW_SPAWN_COMMANDS) {
        console.log(
          "SPAWNING COMMAND:\n" +
            [
              "echo -n | " +
                Object.entries(env)
                  .map(([key, value]) => `${unsafeBashEscape(key)}=${unsafeBashEscape(value)}`)
                  .join(" "),
              unsafeBashEscape(command),
              ...args.map(unsafeBashEscape),
            ].join(" ") +
            " | cat",
        );
      }
      subprocess = spawn(command, args, {
        stdio: ["ignore", "pipe", "pipe"],
        timeout,
@@ -986,7 +891,6 @@ async function spawnSafe(options) {
    stdout: buffer,
    timestamp: timestamp || Date.now(),
    duration: duration || 0,
    pid: subprocess?.pid,
  };
}

@@ -1015,16 +919,10 @@ function getCombinedPath(execPath) {
  return _combinedPath;
}

/**
 * @typedef {object} SpawnBunResult
 * @extends SpawnResult
 * @property {string} [crashes]
 */

/**
 * @param {string} execPath Path to bun binary
 * @param {SpawnOptions} options
 * @returns {Promise<SpawnBunResult>}
 * @returns {Promise<SpawnResult>}
 */
async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
  const path = getCombinedPath(execPath);
@@ -1043,13 +941,11 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
    BUN_DEBUG_QUIET_LOGS: "1",
    BUN_GARBAGE_COLLECTOR_LEVEL: "1",
    BUN_JSC_randomIntegrityAuditRate: "1.0",
    BUN_ENABLE_CRASH_REPORTING: "0", // change this to '1' if https://github.com/oven-sh/bun/issues/13012 is implemented
    BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
    BUN_INSTALL_CACHE_DIR: tmpdirPath,
    SHELLOPTS: isWindows ? "igncr" : undefined, // ignore "\r" on Windows
    TEST_TMPDIR: tmpdirPath, // Used in Node.js tests.
    ...(typeof remapPort == "number"
      ? { BUN_CRASH_REPORT_URL: `http://localhost:${remapPort}` }
      : { BUN_ENABLE_CRASH_REPORTING: "0" }),
  };

  if (basename(execPath).includes("asan")) {
@@ -1073,8 +969,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
    bunEnv["TEMP"] = tmpdirPath;
  }
  try {
    const existingCores = options["coredump-upload"] ? readdirSync(coresDir) : [];
    const result = await spawnSafe({
    return await spawnSafe({
      command: execPath,
      args,
      cwd,
@@ -1083,87 +978,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
      stdout,
      stderr,
    });
    const newCores = options["coredump-upload"] ? readdirSync(coresDir).filter(c => !existingCores.includes(c)) : [];
    let crashes = "";
    if (options["coredump-upload"] && (result.signalCode !== null || newCores.length > 0)) {
      // warn if the main PID crashed and we don't have a core
      if (result.signalCode !== null && !newCores.some(c => c.endsWith(`${result.pid}.core`))) {
        crashes += `main process killed by ${result.signalCode} but no core file found\n`;
      }

      if (newCores.length > 0) {
        result.ok = false;
        if (!isAlwaysFailure(result.error)) result.error = "core dumped";
      }

      for (const coreName of newCores) {
        const corePath = join(coresDir, coreName);
        let out = "";
        const gdb = await spawnSafe({
          command: "gdb",
          args: ["-batch", `--eval-command=bt`, "--core", corePath, execPath],
          timeout: 240_000,
          stderr: () => {},
          stdout(text) {
            out += text;
          },
        });
        if (!gdb.ok) {
          crashes += `failed to get backtrace from GDB: ${gdb.error}\n`;
        } else {
          crashes += `======== Stack trace from GDB for ${coreName}: ========\n`;
          for (const line of out.split("\n")) {
            // filter GDB output since it is pretty verbose
            if (
              line.startsWith("Program terminated") ||
              line.startsWith("#") || // gdb backtrace lines start with #0, #1, etc.
              line.startsWith("[Current thread is")
            ) {
              crashes += line + "\n";
            }
          }
        }
      }
    }

    // Skip this if the remap server didn't work or if Bun exited normally
    // (tests in which a subprocess crashed should at least set exit code 1)
    if (typeof remapPort == "number" && result.exitCode !== 0) {
      try {
        // When Bun crashes, it exits before the subcommand it runs to upload the crash report has necessarily finished.
        // So wait a little bit to make sure that the crash report has at least started uploading
        // (once the server sees the /ack request then /traces will wait for any crashes to finish processing)
        // There is a bug that if a test causes crash reports but exits with code 0, the crash reports will instead
        // be attributed to the next test that fails. I'm not sure how to fix this without adding a sleep in between
        // all tests (which would slow down CI a lot).
        await setTimeoutPromise(500);
        const response = await fetch(`http://localhost:${remapPort}/traces`);
        if (!response.ok || response.status !== 200) throw new Error(`server responded with code ${response.status}`);
        const traces = await response.json();
        if (traces.length > 0) {
          result.ok = false;
          if (!isAlwaysFailure(result.error)) result.error = "crash reported";

          crashes += `${traces.length} crashes reported during this test\n`;
          for (const t of traces) {
            if (t.failed_parse) {
              crashes += "Trace string failed to parse:\n";
              crashes += t.failed_parse + "\n";
            } else if (t.failed_remap) {
              crashes += "Parsed trace failed to remap:\n";
              crashes += JSON.stringify(t.failed_remap, null, 2) + "\n";
            } else {
              crashes += "================\n";
              crashes += t.remap + "\n";
            }
          }
        }
      } catch (e) {
        crashes += "failed to fetch traces: " + e.toString() + "\n";
      }
    }
    if (crashes.length > 0) result.crashes = crashes;
    return result;
  } finally {
    try {
      rmSync(tmpdirPath, { recursive: true, force: true });
@@ -1245,20 +1059,19 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
  const env = {
    GITHUB_ACTIONS: "true", // always true so annotations are parsed
  };
  if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
  if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
    env.BUN_JSC_validateExceptionChecks = "1";
  }

  const { ok, error, stdout, crashes } = await spawnBun(execPath, {
  const { ok, error, stdout } = await spawnBun(execPath, {
    args: isReallyTest ? testArgs : [...args, absPath],
    cwd: options["cwd"],
    timeout: isReallyTest ? timeout : 30_000,
    env,
    stdout: options.stdout,
    stderr: options.stderr,
    stdout: chunk => pipeTestStdout(process.stdout, chunk),
    stderr: chunk => pipeTestStdout(process.stderr, chunk),
  });
  let { tests, errors, stdout: stdoutPreview } = parseTestStdout(stdout, testPath);
  if (crashes) stdoutPreview += crashes;
  const { tests, errors, stdout: stdoutPreview } = parseTestStdout(stdout, testPath);

  // If we generated a JUnit file and we're on BuildKite, upload it immediately
  if (junitFilePath && isReallyTest && isBuildkite && cliOptions["junit-upload"]) {
@@ -1288,6 +1101,9 @@ function getTestTimeout(testPath) {
  if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) {
    return integrationTimeout;
  }
  if (/napi/i.test(testPath) || /v8/i.test(testPath)) {
    return napiTimeout;
  }
  return testTimeout;
}

@@ -1430,12 +1246,11 @@ function parseTestStdout(stdout, testPath) {
 * @returns {Promise<TestResult>}
 */
async function spawnBunInstall(execPath, options) {
  let { ok, error, stdout, duration, crashes } = await spawnBun(execPath, {
  const { ok, error, stdout, duration } = await spawnBun(execPath, {
    args: ["install"],
    timeout: testTimeout,
    ...options,
  });
  if (crashes) stdout += crashes;
  const relativePath = relative(cwd, options.cwd);
  const testPath = join(relativePath, "package.json");
  const status = ok ? "pass" : "fail";
@@ -1602,30 +1417,20 @@ async function getVendorTests(cwd) {
  const vendorPath = join(cwd, "vendor", name);

  if (!existsSync(vendorPath)) {
    const { ok, error } = await spawnSafe({
    await spawnSafe({
      command: "git",
      args: ["clone", "--depth", "1", "--single-branch", repository, vendorPath],
      timeout: testTimeout,
      cwd,
    });
    if (!ok) throw new Error(`failed to git clone vendor '${name}': ${error}`);
  }

  let { ok, error } = await spawnSafe({
  await spawnSafe({
    command: "git",
    args: ["fetch", "--depth", "1", "origin", "tag", tag],
    timeout: testTimeout,
    cwd: vendorPath,
  });
  if (!ok) throw new Error(`failed to fetch tag ${tag} for vendor '${name}': ${error}`);

  ({ ok, error } = await spawnSafe({
    command: "git",
    args: ["checkout", tag],
    timeout: testTimeout,
    cwd: vendorPath,
  }));
  if (!ok) throw new Error(`failed to checkout tag ${tag} for vendor '${name}': ${error}`);

  const packageJsonPath = join(vendorPath, "package.json");
  if (!existsSync(packageJsonPath)) {
@@ -1926,10 +1731,9 @@ function getTestLabel() {
/**
 * @param {TestResult | TestResult[]} result
 * @param {boolean} concise
 * @param {number} retries
 * @returns {string}
 */
function formatTestToMarkdown(result, concise, retries) {
function formatTestToMarkdown(result, concise) {
  const results = Array.isArray(result) ? result : [result];
  const buildLabel = getTestLabel();
  const buildUrl = getBuildUrl();
@@ -1973,9 +1777,6 @@ function formatTestToMarkdown(result, concise, retries) {
    if (platform) {
      markdown += ` on ${platform}`;
    }
    if (retries > 0) {
      markdown += ` (${retries} ${retries === 1 ? "retry" : "retries"})`;
    }

    if (concise) {
      markdown += "</li>\n";
@@ -2180,9 +1981,7 @@ function isAlwaysFailure(error) {
    error.includes("segmentation fault") ||
    error.includes("illegal instruction") ||
    error.includes("sigtrap") ||
    error.includes("error: addresssanitizer") ||
    error.includes("core dumped") ||
    error.includes("crash reported")
    error.includes("error: addresssanitizer")
  );
}
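Taken together, these hunks revert the experimental parallel runner: the `--parallel` flag, the pLimit-based scheduler, the core-dump/crash-report collection in `spawnBun`, and the retries count in the Markdown reports. The removed scheduling followed this shape (a simplified, illustrative sketch, not the full code):

```js
import { availableParallelism } from "node:os";
import pLimit from "./p-limit.mjs";

// Each test becomes a queued job; at most `parallelism` run at once.
const parallelism = options["parallel"] ? availableParallelism() : 1;
const limit = pLimit(parallelism);
await Promise.all(tests.map(testPath => limit(() => runTest(testPath))));
```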
scripts/sort-imports.ts → scripts/sortImports.ts (87 lines changed, Executable file → Normal file)
@@ -1,4 +1,3 @@
#!/usr/bin/env bun
import { readdirSync } from "fs";
import path from "path";

@@ -17,9 +16,10 @@ const usage = String.raw`
Usage: bun scripts/sortImports [options] <files...>

Options:
  --help              Show this help message
  --include-pub       Also sort ${"`pub`"} imports
  --keep-unused       Don't remove unused imports
  --help              Show this help message
  --no-include-pub    Exclude pub imports from sorting
  --no-remove-unused  Don't remove unused imports
  --include-unsorted  Process files even if they don't have @sortImports marker

Examples:
  bun scripts/sortImports src
@@ -34,9 +34,9 @@ if (filePaths.length === 0) {
}

const config = {
  includePub: args.includes("--include-pub"),
  removeUnused: !args.includes("--keep-unused"),
  normalizePaths: "./",
  includePub: !args.includes("--no-include-pub"),
  removeUnused: !args.includes("--no-remove-unused"),
  includeUnsorted: args.includes("--include-unsorted"),
};

// Type definitions
@@ -68,11 +68,11 @@ function parseDeclarations(
    const line = lines[i];

    if (line === "// @sortImports") {
      lines[i] = DELETED_LINE;
      lines[i] = "";
      continue;
    }

    const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);(\s*\/\/[^\n]*)?$/;
    const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/;
    const match = line.match(inlineDeclPattern);

    if (!match) continue;
@@ -102,10 +102,6 @@ function parseDeclarations(
      continue;
    }

    if (declarations.has(name)) {
      unusedLineIndices.push(i);
      continue;
    }
    declarations.set(name, {
      whole: line,
      index: i,
@@ -279,6 +275,8 @@ function sortGroupsAndDeclarations(groups: Map<string, Group>): string[] {
// Generate the sorted output
function generateSortedOutput(lines: string[], groups: Map<string, Group>, sortedGroupKeys: string[]): string[] {
  const outputLines = [...lines];
  outputLines.push("");
  outputLines.push("// @sortImports");

  for (const groupKey of sortedGroupKeys) {
    const groupDeclarations = groups.get(groupKey)!;
@@ -290,36 +288,22 @@ function generateSortedOutput(lines: string[], groups: Map<string, Group>, sorte
    // Add declarations to output and mark original lines for removal
    for (const declaration of groupDeclarations.declarations) {
      outputLines.push(declaration.whole);
      outputLines[declaration.index] = DELETED_LINE;
      outputLines[declaration.index] = "";
    }
  }

  return outputLines;
}

function extractThisDeclaration(declarations: Map<string, Declaration>): Declaration | null {
  for (const declaration of declarations.values()) {
    if (declaration.value === "@This()") {
      declarations.delete(declaration.key);
      return declaration;
    }
  }
  return null;
}

const DELETED_LINE = "%DELETED_LINE%";

// Main execution function for a single file
async function processFile(filePath: string): Promise<void> {
  const originalFileContents = await Bun.file(filePath).text();
  let fileContents = originalFileContents;

  if (config.normalizePaths === "") {
    fileContents = fileContents.replaceAll(`@import("./`, `@import("`);
  } else if (config.normalizePaths === "./") {
    fileContents = fileContents.replaceAll(/@import\("([A-Za-z0-9_-][^"]*\.zig)"\)/g, '@import("./$1")');
    fileContents = fileContents.replaceAll(`@import("./../`, `@import("../`);
  if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) {
    return;
  }
  console.log(`Processing: ${filePath}`);

  let needsRecurse = true;
  while (needsRecurse) {
@@ -328,7 +312,6 @@ async function processFile(filePath: string): Promise<void> {
    const lines = fileContents.split("\n");

    const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents);
    const thisDeclaration = extractThisDeclaration(declarations);
    const groups = groupDeclarationsByImportPath(declarations);

    promoteItemsWithChildGroups(groups);
@@ -340,46 +323,13 @@ async function processFile(filePath: string): Promise<void> {
    // Remove unused declarations
    if (config.removeUnused) {
      for (const line of unusedLineIndices) {
        sortedLines[line] = DELETED_LINE;
        sortedLines[line] = "";
        needsRecurse = true;
      }
    }
    if (thisDeclaration) {
      var onlyCommentsBeforeThis = true;
      for (const [i, line] of sortedLines.entries()) {
        if (i >= thisDeclaration.index) {
          break;
        }
        if (line === "" || line === DELETED_LINE) {
          continue;
        }
        if (!line.startsWith("//")) {
          onlyCommentsBeforeThis = false;
          break;
        }
      }
      if (!onlyCommentsBeforeThis) {
        sortedLines[thisDeclaration.index] = DELETED_LINE;
        let firstNonFileCommentLine = 0;
        for (const line of sortedLines) {
          if (line.startsWith("//!")) {
            firstNonFileCommentLine++;
          } else {
            break;
          }
        }
        const insert = [thisDeclaration.whole, ""];
        if (firstNonFileCommentLine > 0) insert.unshift("");
        sortedLines.splice(firstNonFileCommentLine, 0, ...insert);
      }
    }
    fileContents = sortedLines.join("\n");
  }

  // Remove deleted lines
  fileContents = fileContents.replaceAll(DELETED_LINE + "\n", "");
  // fileContents = fileContents.replaceAll(DELETED_LINE, ""); // any remaining lines

  // Remove any leading newlines
  fileContents = fileContents.replace(/^\n+/, "");

@@ -393,6 +343,7 @@ async function processFile(filePath: string): Promise<void> {
  if (fileContents === "\n") fileContents = "";

  if (fileContents === originalFileContents) {
    console.log(`✓ No changes: ${filePath}`);
    return;
  }

@@ -418,7 +369,7 @@ async function main() {
        successCount++;
      } catch (error) {
        errorCount++;
        console.error(`Failed to process ${path.join(filePath, file)}:\n`, error);
        console.error(`Failed to process ${filePath}`);
      }
    }
    continue;
@@ -429,7 +380,7 @@ async function main() {
      successCount++;
    } catch (error) {
      errorCount++;
      console.error(`Failed to process ${filePath}:\n`, error);
      console.error(`Failed to process ${filePath}`);
    }
  }
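One behavioral detail in this diff: the old version blanked deleted lines with an empty string, while the new version marks them with an explicit `%DELETED_LINE%` sentinel so they can be stripped wholesale instead of leaving blank lines behind. The pattern in isolation (illustrative only, not the script itself):

```js
// Mark lines for deletion with a sentinel, then strip them in a single pass.
const DELETED_LINE = "%DELETED_LINE%";
const lines = ["const a = 1;", DELETED_LINE, "const b = 2;"];
const output = lines.join("\n").replaceAll(DELETED_LINE + "\n", "");
console.log(output); // "const a = 1;\nconst b = 2;"
```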
@@ -2702,14 +2702,7 @@ export function reportAnnotationToBuildKite({ context, label, content, style = "
      source: "buildkite",
      level: "error",
    });
    reportAnnotationToBuildKite({
      context,
      label: `${label}-error`,
      content: errorContent,
      style,
      priority,
      attempt: attempt + 1,
    });
    reportAnnotationToBuildKite({ label: `${label}-error`, content: errorContent, attempt: attempt + 1 });
}

/**
@@ -2857,14 +2850,6 @@ export function printEnvironment() {
      }
    });
  }
  if (isLinux) {
    startGroup("Memory", () => {
      const shell = which(["sh", "bash"]);
      if (shell) {
        spawnSync([shell, "-c", "free -m -w"], { stdio: "inherit" });
      }
    });
  }
  if (isWindows) {
    startGroup("Disk (win)", () => {
      const shell = which(["pwsh"]);
@@ -2872,14 +2857,6 @@ export function printEnvironment() {
        spawnSync([shell, "-c", "get-psdrive"], { stdio: "inherit" });
      }
    });
    startGroup("Memory", () => {
      const shell = which(["pwsh"]);
      if (shell) {
        spawnSync([shell, "-c", "Get-Counter '\\Memory\\Available MBytes'"], { stdio: "inherit" });
        console.log();
        spawnSync([shell, "-c", "Get-CimInstance Win32_PhysicalMemory"], { stdio: "inherit" });
      }
    });
  }
}
@@ -1,90 +0,0 @@
/**
 * yocto-queue@1.2.1
 * https://github.com/sindresorhus/yocto-queue
 * MIT (c) Sindre Sorhus
 */

/*
How it works:
`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.
*/

class Node {
  value;
  next;

  constructor(value) {
    this.value = value;
  }
}

export default class Queue {
  #head;
  #tail;
  #size;

  constructor() {
    this.clear();
  }

  enqueue(value) {
    const node = new Node(value);

    if (this.#head) {
      this.#tail.next = node;
      this.#tail = node;
    } else {
      this.#head = node;
      this.#tail = node;
    }

    this.#size++;
  }

  dequeue() {
    const current = this.#head;
    if (!current) {
      return;
    }

    this.#head = this.#head.next;
    this.#size--;
    return current.value;
  }

  peek() {
    if (!this.#head) {
      return;
    }

    return this.#head.value;

    // TODO: Node.js 18.
    // return this.#head?.value;
  }

  clear() {
    this.#head = undefined;
    this.#tail = undefined;
    this.#size = 0;
  }

  get size() {
    return this.#size;
  }

  *[Symbol.iterator]() {
    let current = this.#head;

    while (current) {
      yield current.value;
      current = current.next;
    }
  }

  *drain() {
    while (this.#head) {
      yield this.dequeue();
    }
  }
}
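The deleted queue is a plain singly linked list with O(1) enqueue and dequeue; p-limit used it to hold pending resolvers. A quick usage sketch of the API shown above:

```js
import Queue from "./yocto-queue.mjs";

const queue = new Queue();
queue.enqueue("first");
queue.enqueue("second");
console.log(queue.size); // 2
console.log(queue.dequeue()); // "first"
console.log(queue.peek()); // "second"
console.log([...queue]); // ["second"], iteration does not consume the queue
```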
scripts/zig-remove-unreferenced-top-level-decls.ts (155 lines, Normal file)
@@ -0,0 +1,155 @@
import * as fs from "fs";
import * as path from "path";

/**
 * Removes unreferenced top-level const declarations from a Zig file
 * Handles patterns like: const <IDENTIFIER> = @import(...) or const <IDENTIFIER> = ...
 */
export function removeUnreferencedImports(content: string): string {
  let modified = true;
  let result = content;

  // Keep iterating until no more changes are made
  while (modified) {
    modified = false;
    const lines = result.split("\n");
    const newLines: string[] = [];

    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];

      // Match top-level const declarations: const <IDENTIFIER> = ...
      const constMatch = line.match(/^const\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=(.*)$/);

      if (constMatch) {
        const identifier = constMatch[1];
        const assignmentPart = constMatch[2];

        // Skip lines that contain '{' in the assignment (likely structs/objects)
        if (assignmentPart.includes("{")) {
          newLines.push(line);
          continue;
        }

        // Check if this identifier is referenced anywhere else in the file
        const isReferenced = isIdentifierReferenced(identifier, lines, i);

        if (!isReferenced) {
          // Skip this line (delete it)
          modified = true;
          console.log(`Removing unreferenced import: ${identifier}`);
          continue;
        }
      }

      newLines.push(line);
    }

    result = newLines.join("\n");
  }

  return result;
}

/**
 * Check if an identifier is referenced anywhere in the file except at the declaration line
 */
function isIdentifierReferenced(identifier: string, lines: string[], declarationLineIndex: number): boolean {
  // Create a regex that matches the identifier as a whole word
  // This prevents matching partial words (e.g. "std" shouldn't match "stdx")
  const identifierRegex = new RegExp(`\\b${escapeRegex(identifier)}\\b`);

  for (let i = 0; i < lines.length; i++) {
    // Skip the declaration line itself
    if (i === declarationLineIndex) {
      continue;
    }

    const line = lines[i];

    // Check if the identifier appears in this line
    if (identifierRegex.test(line)) {
      return true;
    }
  }

  return false;
}

/**
 * Escape special regex characters in a string
 */
function escapeRegex(string: string): string {
  return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

/**
 * Process a single Zig file
 */
export function processZigFile(filePath: string): void {
  try {
    const content = fs.readFileSync(filePath, "utf-8");
    const cleaned = removeUnreferencedImports(content);

    if (content !== cleaned) {
      fs.writeFileSync(filePath, cleaned);
      console.log(`Cleaned: ${filePath}`);
    } else {
      console.log(`No changes: ${filePath}`);
    }
  } catch (error) {
    console.error(`Error processing ${filePath}:`, error);
  }
}

/**
 * Process multiple Zig files or directories
 */
export function processFiles(paths: string[]): void {
  for (const inputPath of paths) {
    const stat = fs.statSync(inputPath);

    if (stat.isDirectory()) {
      // Process all .zig files in directory recursively
      processDirectory(inputPath);
    } else if (inputPath.endsWith(".zig")) {
      processZigFile(inputPath);
    } else {
      console.warn(`Skipping non-Zig file: ${inputPath}`);
    }
  }
}

/**
 * Recursively process all .zig files in a directory
 */
function processDirectory(dirPath: string): void {
  const entries = fs.readdirSync(dirPath, { withFileTypes: true });

  for (const entry of entries) {
    const fullPath = path.join(dirPath, entry.name);

    if (entry.isDirectory()) {
      processDirectory(fullPath);
    } else if (entry.name.endsWith(".zig")) {
      processZigFile(fullPath);
    }
  }
}

// CLI usage
if (require.main === module) {
  const args = process.argv.slice(2);

  if (args.length === 0) {
    console.log("Usage: bun zig-remove-unreferenced-top-level-decls.ts <file1.zig> [file2.zig] [directory]...");
    console.log("");
    console.log("Examples:");
    console.log("  bun zig-remove-unreferenced-top-level-decls.ts file.zig");
    console.log("  bun zig-remove-unreferenced-top-level-decls.ts src/");
    console.log("  bun zig-remove-unreferenced-top-level-decls.ts file1.zig file2.zig src/");
    process.exit(1);
  }

  processFiles(args);
}
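Beyond the CLI entry point, the module exports its helpers, so it can also be driven programmatically. A sketch under that assumption (the inline Zig source is a made-up example):

```js
import { removeUnreferencedImports } from "./zig-remove-unreferenced-top-level-decls.ts";

// `unused` is declared but never referenced again, so its line is dropped; `std` is kept.
const source = 'const std = @import("std");\nconst unused = @import("unused.zig");\npub fn main() void {\n    _ = std;\n}';
console.log(removeUnreferencedImports(source));
```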
@@ -1,4 +1,12 @@
const Global = @This();
const std = @import("std");
const Environment = @import("./env.zig");

const Output = @import("output.zig");
const use_mimalloc = bun.use_mimalloc;
const Mimalloc = bun.Mimalloc;
const bun = @import("bun");

const version_string = Environment.version_string;

/// Does not have the canary tag, because it is exposed in `Bun.version`
/// "1.0.0" or "1.0.0-debug"
@@ -104,7 +112,6 @@ pub fn isExiting() bool {
/// Flushes stdout and stderr (in exit/quick_exit callback) and exits with the given code.
pub fn exit(code: u32) noreturn {
    is_exiting.store(true, .monotonic);
    _ = @atomicRmw(usize, &bun.analytics.Features.exited, .Add, 1, .monotonic);

    // If we are crashing, allow the crash handler to finish it's work.
    bun.crash_handler.sleepForeverIfAnotherThreadIsCrashing();
@@ -167,10 +174,10 @@ pub const versions = @import("./generated_versions_list.zig");
// 2. if I want to configure allocator later
pub inline fn configureAllocator(_: AllocatorConfiguration) void {
    // if (comptime !use_mimalloc) return;
    // const mimalloc = bun.mimalloc;
    // mimalloc.mi_option_set_enabled(mimalloc.mi_option_verbose, config.verbose);
    // mimalloc.mi_option_set_enabled(mimalloc.mi_option_large_os_pages, config.long_running);
    // if (!config.long_running) mimalloc.mi_option_set(mimalloc.mi_option_reset_delay, 0);
    // const Mimalloc = @import("./allocators/mimalloc.zig");
    // Mimalloc.mi_option_set_enabled(Mimalloc.mi_option_verbose, config.verbose);
    // Mimalloc.mi_option_set_enabled(Mimalloc.mi_option_large_os_pages, config.long_running);
    // if (!config.long_running) Mimalloc.mi_option_set(Mimalloc.mi_option_reset_delay, 0);
}

pub fn notimpl() noreturn {
@@ -184,17 +191,20 @@ pub fn crash() noreturn {
    Global.exit(1);
}

const Global = @This();
const string = bun.string;

pub const BunInfo = struct {
    bun_version: string,
    platform: analytics.GenerateHeader.GeneratePlatform.Platform,
    platform: Analytics.GenerateHeader.GeneratePlatform.Platform,

    const analytics = bun.analytics;
    const JSON = bun.json;
    const JSAst = bun.ast;
    const Analytics = @import("./analytics/analytics_thread.zig");
    const JSON = bun.JSON;
    const JSAst = bun.JSAst;
    pub fn generate(comptime Bundler: type, _: Bundler, allocator: std.mem.Allocator) !JSAst.Expr {
        const info = BunInfo{
            .bun_version = Global.package_json_version,
            .platform = analytics.GenerateHeader.GeneratePlatform.forOS(),
            .platform = Analytics.GenerateHeader.GeneratePlatform.forOS(),
        };

        return try JSON.toAST(allocator, BunInfo, info);
@@ -209,7 +219,7 @@ comptime {
}

pub export fn Bun__onExit() void {
    bun.jsc.Node.FSEvents.closeAndWait();
    bun.JSC.Node.FSEvents.closeAndWait();

    runExitCallbacks();
    Output.flush();
@@ -221,15 +231,3 @@ pub export fn Bun__onExit() void {
comptime {
    _ = Bun__onExit;
}

const string = []const u8;

const Output = @import("./output.zig");
const std = @import("std");

const Environment = @import("./env.zig");
const version_string = Environment.version_string;

const bun = @import("bun");
const Mimalloc = bun.mimalloc;
const use_mimalloc = bun.use_mimalloc;
@@ -1,4 +1,10 @@
const HTMLScanner = @This();
const std = @import("std");
const bun = @import("bun");
const ImportRecord = @import("./import_record.zig").ImportRecord;
const ImportKind = @import("./import_record.zig").ImportKind;
const lol = @import("./deps/lol-html.zig");
const logger = bun.logger;
const fs = bun.fs;

allocator: std.mem.Allocator,
import_records: ImportRecord.List = .{},
@@ -297,12 +303,4 @@ pub fn HTMLProcessor(
    };
}

const lol = @import("./deps/lol-html.zig");
const std = @import("std");

const ImportKind = @import("./import_record.zig").ImportKind;
const ImportRecord = @import("./import_record.zig").ImportRecord;

const bun = @import("bun");
const fs = bun.fs;
const logger = bun.logger;
const HTMLScanner = @This();
@@ -1,5 +1,3 @@
const OutputFile = @This();

// Instead of keeping files in-memory, we:
// 1. Write directly to disk
// 2. (Optional) move the file to the destination
@@ -15,7 +13,7 @@ hash: u64 = 0,
is_executable: bool = false,
source_map_index: u32 = std.math.maxInt(u32),
bytecode_index: u32 = std.math.maxInt(u32),
output_kind: jsc.API.BuildArtifact.OutputKind,
output_kind: JSC.API.BuildArtifact.OutputKind,
/// Relative
dest_path: []const u8 = "",
side: ?bun.bake.Side,
@@ -154,14 +152,14 @@ pub const Value = union(Kind) {

pub const SavedFile = struct {
    pub fn toJS(
        globalThis: *jsc.JSGlobalObject,
        globalThis: *JSC.JSGlobalObject,
        path: []const u8,
        byte_size: usize,
    ) jsc.JSValue {
    ) JSC.JSValue {
        const mime_type = globalThis.bunVM().mimeType(path);
        const store = jsc.WebCore.Blob.Store.initFile(
            jsc.Node.PathOrFileDescriptor{
                .path = jsc.Node.PathLike{
        const store = JSC.WebCore.Blob.Store.initFile(
            JSC.Node.PathOrFileDescriptor{
                .path = JSC.Node.PathLike{
                    .string = bun.PathString.init(path),
                },
            },
@@ -169,12 +167,12 @@ pub const SavedFile = struct {
            bun.default_allocator,
        ) catch unreachable;

        var blob = bun.default_allocator.create(jsc.WebCore.Blob) catch unreachable;
        blob.* = jsc.WebCore.Blob.initWithStore(store, globalThis);
        var blob = bun.default_allocator.create(JSC.WebCore.Blob) catch unreachable;
        blob.* = JSC.WebCore.Blob.initWithStore(store, globalThis);
        if (mime_type) |mime| {
            blob.content_type = mime.value;
        }
        blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(byte_size));
        blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(byte_size));
        blob.allocator = bun.default_allocator;
        return blob.toJS(globalThis);
    }
@@ -215,7 +213,7 @@ pub const Options = struct {
    size: ?usize = null,
    input_path: []const u8 = "",
    display_size: u32 = 0,
    output_kind: jsc.API.BuildArtifact.OutputKind,
    output_kind: JSC.API.BuildArtifact.OutputKind,
    is_executable: bool,
    data: union(enum) {
        buffer: struct {
@@ -289,7 +287,7 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u
    }

    var path_buf: bun.PathBuffer = undefined;
    _ = try jsc.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
    _ = try JSC.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
        .data = .{ .buffer = .{
            .buffer = .{
                .ptr = @constCast(value.bytes.ptr),
@@ -344,20 +342,20 @@ pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: Fil
pub fn toJS(
    this: *OutputFile,
    owned_pathname: ?[]const u8,
    globalObject: *jsc.JSGlobalObject,
) bun.jsc.JSValue {
    globalObject: *JSC.JSGlobalObject,
) bun.JSC.JSValue {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => .js_undefined,
        .copy => |copy| brk: {
            const file_blob = jsc.WebCore.Blob.Store.initFile(
            const file_blob = JSC.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())
                    jsc.Node.PathOrFileDescriptor{
                    JSC.Node.PathOrFileDescriptor{
                        .fd = copy.fd,
                    }
                else
                    jsc.Node.PathOrFileDescriptor{
                        .path = jsc.Node.PathLike{ .string = bun.PathString.init(globalObject.allocator().dupe(u8, copy.pathname) catch unreachable) },
                    JSC.Node.PathOrFileDescriptor{
                        .path = JSC.Node.PathLike{ .string = bun.PathString.init(globalObject.allocator().dupe(u8, copy.pathname) catch unreachable) },
                    },
                this.loader.toMimeType(&.{owned_pathname orelse ""}),
                globalObject.allocator(),
@@ -365,8 +363,8 @@ pub fn toJS(
                Output.panic("error: Unable to create file blob: \"{s}\"", .{@errorName(err)});
            };

            var build_output = bun.new(jsc.API.BuildArtifact, .{
                .blob = jsc.WebCore.Blob.initWithStore(file_blob, globalObject),
            var build_output = bun.new(JSC.API.BuildArtifact, .{
                .blob = JSC.WebCore.Blob.initWithStore(file_blob, globalObject),
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
@@ -383,12 +381,12 @@ pub fn toJS(
            break :brk build_output.toJS(globalObject);
        },
        .saved => brk: {
            var build_output = bun.default_allocator.create(jsc.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            var build_output = bun.default_allocator.create(JSC.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            const path_to_use = owned_pathname orelse this.src_path.text;

            const file_blob = jsc.WebCore.Blob.Store.initFile(
                jsc.Node.PathOrFileDescriptor{
                    .path = jsc.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
            const file_blob = JSC.WebCore.Blob.Store.initFile(
                JSC.Node.PathOrFileDescriptor{
                    .path = JSC.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
                },
                this.loader.toMimeType(&.{owned_pathname orelse ""}),
                globalObject.allocator(),
@@ -403,8 +401,8 @@ pub fn toJS(
                },
            };

            build_output.* = jsc.API.BuildArtifact{
                .blob = jsc.WebCore.Blob.initWithStore(file_blob, globalObject),
            build_output.* = JSC.API.BuildArtifact{
                .blob = JSC.WebCore.Blob.initWithStore(file_blob, globalObject),
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
@@ -414,7 +412,7 @@ pub fn toJS(
            break :brk build_output.toJS(globalObject);
        },
        .buffer => |buffer| brk: {
            var blob = jsc.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalObject);
            var blob = JSC.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalObject);
            if (blob.store) |store| {
                store.mime_type = this.loader.toMimeType(&.{owned_pathname orelse ""});
                blob.content_type = store.mime_type.value;
@@ -422,10 +420,10 @@ pub fn toJS(
                blob.content_type = this.loader.toMimeType(&.{owned_pathname orelse ""}).value;
            }

            blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
            blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));

            var build_output = bun.default_allocator.create(jsc.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            build_output.* = jsc.API.BuildArtifact{
            var build_output = bun.default_allocator.create(JSC.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            build_output.* = JSC.API.BuildArtifact{
                .blob = blob,
                .hash = this.hash,
                .loader = this.input_loader,
@@ -448,20 +446,20 @@ pub fn toJS(
pub fn toBlob(
    this: *OutputFile,
    allocator: std.mem.Allocator,
    globalThis: *jsc.JSGlobalObject,
) !jsc.WebCore.Blob {
    globalThis: *JSC.JSGlobalObject,
) !JSC.WebCore.Blob {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => @panic("Cannot convert noop output file to blob"),
        .copy => |copy| brk: {
            const file_blob = try jsc.WebCore.Blob.Store.initFile(
            const file_blob = try JSC.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())
                    jsc.Node.PathOrFileDescriptor{
                    JSC.Node.PathOrFileDescriptor{
                        .fd = copy.fd,
                    }
                else
                    jsc.Node.PathOrFileDescriptor{
                        .path = jsc.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, copy.pathname) catch unreachable) },
                    JSC.Node.PathOrFileDescriptor{
                        .path = JSC.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, copy.pathname) catch unreachable) },
                    },
                this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
                allocator,
@@ -474,12 +472,12 @@ pub fn toBlob(
                },
            };

            break :brk jsc.WebCore.Blob.initWithStore(file_blob, globalThis);
            break :brk JSC.WebCore.Blob.initWithStore(file_blob, globalThis);
        },
        .saved => brk: {
            const file_blob = try jsc.WebCore.Blob.Store.initFile(
                jsc.Node.PathOrFileDescriptor{
                    .path = jsc.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, this.src_path.text) catch unreachable) },
            const file_blob = try JSC.WebCore.Blob.Store.initFile(
                JSC.Node.PathOrFileDescriptor{
                    .path = JSC.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, this.src_path.text) catch unreachable) },
                },
                this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
                allocator,
@@ -492,10 +490,10 @@ pub fn toBlob(
                },
            };

            break :brk jsc.WebCore.Blob.initWithStore(file_blob, globalThis);
            break :brk JSC.WebCore.Blob.initWithStore(file_blob, globalThis);
        },
        .buffer => |buffer| brk: {
            var blob = jsc.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalThis);
            var blob = JSC.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalThis);
            if (blob.store) |store| {
                store.mime_type = this.loader.toMimeType(&.{ this.dest_path, this.src_path.text });
                blob.content_type = store.mime_type.value;
@@ -510,22 +508,22 @@ pub fn toBlob(
            },
        };

        blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
        blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
        break :brk blob;
    },
};
}

const OutputFile = @This();
const string = []const u8;

const resolve_path = @import("./resolver/resolve_path.zig");
const resolver = @import("./resolver/resolver.zig");
const std = @import("std");
const Loader = @import("./options.zig").Loader;

const bun = @import("bun");
const Environment = bun.Environment;
const FileDescriptorType = bun.FileDescriptor;

const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;
const Fs = bun.fs;
const jsc = bun.jsc;
const Output = bun.Global.Output;
const Loader = @import("./options.zig").Loader;
const resolver = @import("./resolver/resolver.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const Output = @import("./Global.zig").Output;
const Environment = bun.Environment;
@@ -14,7 +14,12 @@
//! * `refresh_rate_ms`
//! * `initial_delay_ms`

const std = @import("std");
const builtin = @import("builtin");
const windows = std.os.windows;
const assert = bun.assert;
const Progress = @This();
const bun = @import("bun");

/// `null` if the current node (and its children) should
/// not print on update()
@@ -448,10 +453,3 @@ test "basic functionality" {
        node.end();
    }
}

const builtin = @import("builtin");
const std = @import("std");
const windows = std.os.windows;

const bun = @import("bun");
const assert = bun.assert;
@@ -1,6 +1,41 @@
//! Originally, we tried using LIEF to inject the module graph into a MachO segment
//! But this incurred a fixed 350ms overhead on every build, which is unacceptable
//! so we give up on codesigning support on macOS for now until we can find a better solution
const bun = @import("bun");
const std = @import("std");
const Schema = bun.Schema.Api;
const strings = bun.strings;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const Syscall = bun.sys;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;

const macho = bun.macho;
const pe = bun.pe;
const w = std.os.windows;

const StandaloneError = error{
    TempFileFailed,
    CopyFailed,
    OpenFailed,
    ReadFailed,
    WriteFailed,
    SeekFailed,
    MachoInitFailed,
    MachoWriteFailed,
    PEInitFailed,
    PEWriteFailed,
    DownloadFailed,
    GetSelfExePathFailed,
    MoveFailed,
    DisableConsoleFailed,
    OutOfMemory,
    InvalidSourceMap,
    FileNotFound,
    @"Corrupted module graph: entry point ID is greater than module list count",
};

pub const StandaloneModuleGraph = struct {
    bytes: []const u8 = "",
@@ -182,7 +217,7 @@ pub const StandaloneModuleGraph = struct {
        return this.wtf_string.dupeRef();
    }

    pub fn blob(this: *File, globalObject: *bun.jsc.JSGlobalObject) *bun.webcore.Blob {
    pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.webcore.Blob {
        if (this.cached_blob == null) {
            const store = bun.webcore.Blob.Store.init(@constCast(this.contents), bun.default_allocator);
            // make it never free
@@ -345,7 +380,9 @@ pub const StandaloneModuleGraph = struct {
        var entry_point_id: ?usize = null;
        var string_builder = bun.StringBuilder{};
        var module_count: usize = 0;
        for (output_files) |output_file| {
        std.debug.print("[DEBUG] toBytes - processing {d} output files\n", .{output_files.len});
        for (output_files, 0..) |output_file, i| {
            std.debug.print("[DEBUG] toBytes - file {d}: dest_path={s}, output_kind={}, side={?}, value={}\n", .{ i, output_file.dest_path, output_file.output_kind, output_file.side, output_file.value });
            string_builder.countZ(output_file.dest_path);
            string_builder.countZ(prefix);
            if (output_file.value == .buffer) {
@@ -360,10 +397,15 @@ pub const StandaloneModuleGraph = struct {
                string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256;
            } else {
                if (entry_point_id == null) {
                    if (output_file.side == null or output_file.side.? == .server) {
                    std.debug.print("[DEBUG] toBytes - checking entry-point: side={?}, output_kind={}\n", .{ output_file.side, output_file.output_kind });
                    // For standalone executables, accept client-side entry points as well as server-side
                    if (output_file.side == null or output_file.side.? == .server or output_file.side.? == .client) {
                        if (output_file.output_kind == .@"entry-point") {
                            std.debug.print("[DEBUG] toBytes - setting entry_point_id = {d}\n", .{module_count});
                            entry_point_id = module_count;
                        }
                    } else {
                        std.debug.print("[DEBUG] toBytes - skipping entry-point due to side: {?}\n", .{output_file.side});
                    }
                }

@@ -373,7 +415,11 @@ pub const StandaloneModuleGraph = struct {
            }
        }

        if (module_count == 0 or entry_point_id == null) return &[_]u8{};
        std.debug.print("[DEBUG] toBytes - module_count: {d}, entry_point_id: {?}\n", .{ module_count, entry_point_id });
        if (module_count == 0 or entry_point_id == null) {
            std.debug.print("[DEBUG] toBytes - returning empty array because module_count={d} or entry_point_id={?}\n", .{ module_count, entry_point_id });
            return &[_]u8{};
        }

        string_builder.cap += @sizeOf(CompiledModuleGraphFile) * output_files.len;
        string_builder.cap += trailer.len;
@@ -491,12 +537,9 @@ pub const StandaloneModuleGraph = struct {
        windows_hide_console: bool = false,
    };

    pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions, target: *const CompileTarget) bun.FileDescriptor {
    pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions, target: *const CompileTarget) anyerror!bun.FileDescriptor {
        var buf: bun.PathBuffer = undefined;
        var zname: [:0]const u8 = bun.span(bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
            Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get temporary file name: {s}", .{@errorName(err)});
            Global.exit(1);
        });
        var zname: [:0]const u8 = bun.span(try bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))));

        const cleanup = struct {
            pub fn toClean(name: [:0]const u8, fd: bun.FileDescriptor) void {
@@ -523,10 +566,7 @@ pub const StandaloneModuleGraph = struct {
            out_buf[zname.len] = 0;
            const out = out_buf[0..zname.len :0];

            bun.copyFile(in, out).unwrap() catch |err| {
                Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
                Global.exit(1);
            };
            try bun.copyFile(in, out).unwrap();
            const file = bun.sys.openFileAtWindows(
                bun.invalid_fd,
                out,
@@ -535,10 +575,7 @@ pub const StandaloneModuleGraph = struct {
                    .disposition = w.FILE_OPEN,
                    .options = w.FILE_SYNCHRONOUS_IO_NONALERT | w.FILE_OPEN_REPARSE_POINT,
                },
            ).unwrap() catch |e| {
                Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open temporary file to copy bun into\n{}", .{e});
                Global.exit(1);
            };
            ).unwrap() catch |err| return err;

            break :brk file;
        }
@@ -590,8 +627,7 @@ pub const StandaloneModuleGraph = struct {
                        else => break,
                    }

                    Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open temporary file to copy bun into\n{}", .{err});
                    Global.exit(1);
                    return err.toZigErr();
                }
            },
        }
@@ -611,9 +647,8 @@ pub const StandaloneModuleGraph = struct {
                    }
                }

                Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open bun executable to copy from as read-only\n{}", .{err});
                cleanup(zname, fd);
                Global.exit(1);
                return err.toZigErr();
            },
        }
    }
@@ -623,9 +658,8 @@ pub const StandaloneModuleGraph = struct {
            defer self_fd.close();

            bun.copyFile(self_fd, fd).unwrap() catch |err| {
                Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
                cleanup(zname, fd);
                Global.exit(1);
                return err;
            };
            break :brk fd;
        };
@@ -718,10 +752,6 @@ pub const StandaloneModuleGraph = struct {
                    cleanup(zname, cloned_executable_fd);
                    Global.exit(1);
                };
                // Set executable permissions when running on POSIX hosts, even for Windows targets
                if (comptime !Environment.isWindows) {
                    _ = bun.c.fchmod(cloned_executable_fd.native(), 0o777);
                }
                return cloned_executable_fd;
            },
            else => {
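// Editor's note (illustrative sketch, not part of the diff): the hunks above
// all apply one pattern: replace `catch { Output.prettyErrorln(...); Global.exit(1); }`
// with `try` / `return err`, so inject() reports failure to its caller instead
// of terminating the process. `openTemp` below is a hypothetical helper.
fn before() bun.FileDescriptor {
    return openTemp() catch |err| {
        Output.prettyErrorln("<r><red>error<r><d>:<r> {s}", .{@errorName(err)});
        Global.exit(1);
    };
}
fn after() anyerror!bun.FileDescriptor {
    return try openTemp(); // the caller decides how to report and whether to exit
}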
@@ -833,26 +863,23 @@ pub const StandaloneModuleGraph = struct {
        output_format: bun.options.Format,
        windows_hide_console: bool,
        windows_icon: ?[]const u8,
    ) !void {
    ) anyerror!void {
        std.debug.print("[DEBUG] StandaloneModuleGraph.toExecutable entry - outfile: {s}\n", .{outfile});
        const bytes = try toBytes(allocator, module_prefix, output_files, output_format);
        std.debug.print("[DEBUG] toBytes returned {d} bytes\n", .{bytes.len});
        if (bytes.len == 0) return;

        const fd = inject(
        const fd = try inject(
            bytes,
            if (target.isDefault())
                bun.selfExePath() catch |err| {
                    Output.err(err, "failed to get self executable path", .{});
                    Global.exit(1);
                }
                try bun.selfExePath()
            else
                download(allocator, target, env) catch |err| {
                    Output.err(err, "failed to download cross-compiled bun executable", .{});
                    Global.exit(1);
                },
                try download(allocator, target, env),
            .{ .windows_hide_console = windows_hide_console },
            target,
        );
        bun.debugAssert(fd.kind == .system);
        std.debug.print("[DEBUG] After inject, about to check Environment.isWindows: {}\n", .{Environment.isWindows});

        if (Environment.isWindows) {
            var outfile_buf: bun.OSPathBuffer = undefined;
@@ -873,7 +900,7 @@ pub const StandaloneModuleGraph = struct {

                _ = bun.windows.deleteOpenedFile(fd);

                Global.exit(1);
                return err;
            };
            fd.close();

@@ -888,18 +915,28 @@ pub const StandaloneModuleGraph = struct {
        }

        var buf: bun.PathBuffer = undefined;
        const temp_location = bun.getFdPath(fd, &buf) catch |err| {
            Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get path for fd: {s}", .{@errorName(err)});
            Global.exit(1);
        };
        const temp_location = bun.getFdPath(fd, &buf) catch |err| return err;

        const dest_basename = std.fs.path.basename(outfile);
        std.debug.print("[DEBUG] toExecutable - temp_location: {s}\n", .{temp_location});
        std.debug.print("[DEBUG] toExecutable - outfile: {s}\n", .{outfile});
        std.debug.print("[DEBUG] toExecutable - dest_basename: {s}\n", .{dest_basename});

        // Check the size of the temporary file before moving
        if (std.fs.cwd().statFile(temp_location)) |temp_stat| {
            std.debug.print("[DEBUG] toExecutable - temp file size: {d} bytes\n", .{temp_stat.size});
        } else |err| {
            std.debug.print("[DEBUG] toExecutable - failed to stat temp file: {}\n", .{err});
        }

        bun.sys.moveFileZWithHandle(
            fd,
            bun.FD.cwd(),
            bun.sliceTo(&(try std.posix.toPosixPath(temp_location)), 0),
            .fromStdDir(root_dir),
            bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0),
            bun.sliceTo(&(try std.posix.toPosixPath(dest_basename)), 0),
        ) catch |err| {
            std.debug.print("[DEBUG] toExecutable - moveFileZWithHandle failed: {}\n", .{err});
            if (err == error.IsDir or err == error.EISDIR) {
                Output.prettyErrorln("<r><red>error<r><d>:<r> {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)});
            } else {
@@ -909,7 +946,7 @@ pub const StandaloneModuleGraph = struct {
                &(try std.posix.toPosixPath(temp_location)),
            );

            Global.exit(1);
            return StandaloneError.MoveFailed;
        };
    }

@@ -1226,13 +1263,13 @@ pub const StandaloneModuleGraph = struct {

        // the allocator given to the JS parser is not respected for all parts
        // of the parse, so we need to remember to reset the ast store
        bun.ast.Expr.Data.Store.reset();
        bun.ast.Stmt.Data.Store.reset();
        bun.JSAst.Expr.Data.Store.reset();
        bun.JSAst.Stmt.Data.Store.reset();
        defer {
            bun.ast.Expr.Data.Store.reset();
            bun.ast.Stmt.Data.Store.reset();
            bun.JSAst.Expr.Data.Store.reset();
            bun.JSAst.Stmt.Data.Store.reset();
        }
        var json = bun.json.parse(&json_src, &log, arena, false) catch
        var json = bun.JSON.parse(&json_src, &log, arena, false) catch
            return error.InvalidSourceMap;

        const mappings_str = json.get("mappings") orelse
@@ -1310,18 +1347,3 @@ pub const StandaloneModuleGraph = struct {
        bun.assert(header_list.items.len == string_payload_start_location);
    }
};

const std = @import("std");
const w = std.os.windows;

const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const Output = bun.Output;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;
const Syscall = bun.sys;
const macho = bun.macho;
const pe = bun.pe;
const strings = bun.strings;
const Schema = bun.schema.api;

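// Editor's note (illustrative sketch, not part of the diff): the entry-point
// selection that the toBytes() hunk converges on, extracted as a predicate.
// `Side` and `OutputKind` are hypothetical stand-ins for the enums behind
// `output_file.side` and `output_file.output_kind`.
fn isEntryPointCandidate(side: ?Side, output_kind: OutputKind) bool {
    const side_ok = side == null or side.? == .server or side.? == .client;
    return side_ok and output_kind == .@"entry-point";
}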
@@ -1,5 +1,13 @@
// https://github.com/lithdew/rheia/blob/162293d0f0e8d6572a8954c0add83f13f76b3cc6/hash_map.zig
// Apache License 2.0
const std = @import("std");

const mem = std.mem;
const math = std.math;
const testing = std.testing;

const bun = @import("bun");
const assert = bun.assert;

pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type {
    return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage);
@@ -777,11 +785,3 @@ test "SortedHashMap: collision test" {
    try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?);
    try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?);
}

const bun = @import("bun");
const assert = bun.assert;

const std = @import("std");
const math = std.math;
const mem = std.mem;
const testing = std.testing;

@@ -1,7 +1,5 @@
//! Bun's cross-platform filesystem watcher. Runs on its own thread.

const Watcher = @This();

const DebugLogScope = bun.Output.Scoped(.watcher, false);
const log = DebugLogScope.log;

@@ -128,6 +126,7 @@ pub fn getHash(filepath: string) HashType {

pub const WatchItemIndex = u16;
pub const max_eviction_count = 8096;
const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
// TODO: some platform-specific behavior is implemented in
// this file instead of the platform-specific file.
// ideally, the constants above can be inlined
@@ -289,7 +288,7 @@ pub fn flushEvictions(this: *Watcher) void {
    }
}

fn watchLoop(this: *Watcher) bun.sys.Maybe(void) {
fn watchLoop(this: *Watcher) bun.JSC.Maybe(void) {
    while (this.running) {
        // individual platform implementation will call onFileUpdate
        switch (Platform.watchLoopCycle(this)) {
@@ -297,7 +296,7 @@ fn watchLoop(this: *Watcher) bun.sys.Maybe(void) {
            .result => |iter| iter,
        }
    }
    return .success;
    return .{ .result = {} };
}

fn appendFileAssumeCapacity(
@@ -309,13 +308,13 @@ fn appendFileAssumeCapacity(
    parent_hash: HashType,
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
) bun.sys.Maybe(void) {
) bun.JSC.Maybe(void) {
    if (comptime Environment.isWindows) {
        // on windows we can only watch items that are in the directory tree of the top level dir
        const rel = bun.path.isParentOrEqual(this.fs.top_level_dir, file_path);
        if (rel == .unrelated) {
            Output.warn("File {s} is not in the project directory and will not be watched\n", .{file_path});
            return .success;
            return .{ .result = {} };
        }
    }

@@ -382,7 +381,7 @@ fn appendFileAssumeCapacity(
    }

    this.watchlist.appendAssumeCapacity(item);
    return .success;
    return .{ .result = {} };
}
fn appendDirectoryAssumeCapacity(
    this: *Watcher,
@@ -390,7 +389,7 @@ fn appendDirectoryAssumeCapacity(
    file_path: string,
    hash: HashType,
    comptime clone_file_path: bool,
) bun.sys.Maybe(WatchItemIndex) {
) bun.JSC.Maybe(WatchItemIndex) {
    if (comptime Environment.isWindows) {
        // on windows we can only watch items that are in the directory tree of the top level dir
        const rel = bun.path.isParentOrEqual(this.fs.top_level_dir, file_path);
@@ -501,7 +500,7 @@ pub fn appendFileMaybeLock(
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
    comptime lock: bool,
) bun.sys.Maybe(void) {
) bun.JSC.Maybe(void) {
    if (comptime lock) this.mutex.lock();
    defer if (comptime lock) this.mutex.unlock();
    bun.assert(file_path.len > 1);
@@ -561,7 +560,7 @@ pub fn appendFileMaybeLock(
        });
    }

    return .success;
    return .{ .result = {} };
}

inline fn isEligibleDirectory(this: *Watcher, dir: string) bool {
@@ -577,7 +576,7 @@ pub fn appendFile(
    dir_fd: bun.FileDescriptor,
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
) bun.sys.Maybe(void) {
) bun.JSC.Maybe(void) {
    return appendFileMaybeLock(this, fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, true);
}

@@ -587,7 +586,7 @@ pub fn addDirectory(
    file_path: string,
    hash: HashType,
    comptime clone_file_path: bool,
) bun.sys.Maybe(WatchItemIndex) {
) bun.JSC.Maybe(WatchItemIndex) {
    this.mutex.lock();
    defer this.mutex.unlock();

@@ -609,7 +608,7 @@ pub fn addFile(
    dir_fd: bun.FileDescriptor,
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
) bun.sys.Maybe(void) {
) bun.JSC.Maybe(void) {
    // This must lock due to concurrent transpiler
    this.mutex.lock();
    defer this.mutex.unlock();
@@ -622,7 +621,7 @@ pub fn addFile(
                fds[index] = fd;
            }
        }
        return .success;
        return .{ .result = {} };
    }

    return this.appendFileMaybeLock(fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, false);
@@ -674,16 +673,13 @@ pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: bun.Sto
    }
}

const string = []const u8;

const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
const options = @import("./options.zig");
const std = @import("std");
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;

const bun = @import("bun");
const Environment = bun.Environment;
const FeatureFlags = bun.FeatureFlags;
const Mutex = bun.Mutex;
const string = bun.string;
const Output = bun.Output;
const Environment = bun.Environment;
const strings = bun.strings;
const FeatureFlags = bun.FeatureFlags;
const options = @import("./options.zig");
const Mutex = bun.Mutex;
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;

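// Editor's note (illustrative sketch, not part of the diff): both return
// spellings above target the same tagged union; the hunks swap the alias
// (`bun.sys.Maybe` vs. `bun.JSC.Maybe`) and with it the availability of the
// `.success` shorthand. Assuming `.success` is declared as the void result:
fn ok() bun.sys.Maybe(void) {
    return .{ .result = {} }; // what `.success` abbreviates for Maybe(void)
}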
@@ -1,12 +1,8 @@
pub const c_allocator = @import("./allocators/basic.zig").c_allocator;
pub const z_allocator = @import("./allocators/basic.zig").z_allocator;
pub const mimalloc = @import("./allocators/mimalloc.zig");
pub const MimallocArena = @import("./allocators/MimallocArena.zig");
pub const AllocationScope = @import("./allocators/AllocationScope.zig");
pub const NullableAllocator = @import("./allocators/NullableAllocator.zig");
pub const MaxHeapAllocator = @import("./allocators/MaxHeapAllocator.zig");
pub const MemoryReportingAllocator = @import("./allocators/MemoryReportingAllocator.zig");
pub const LinuxMemFdAllocator = @import("./allocators/LinuxMemFdAllocator.zig");
const std = @import("std");

const Environment = @import("./env.zig");
const bun = @import("bun");
const OOM = bun.OOM;

pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
    return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
@@ -294,6 +290,8 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
    };
}

const Mutex = bun.Mutex;

/// Append-only list.
/// Stores an initial count in .bss section of the object file
/// Overflows to heap when count is exceeded.
@@ -771,10 +769,3 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
        }
    };
}

const Environment = @import("./env.zig");
const std = @import("std");

const bun = @import("bun");
const OOM = bun.OOM;
const Mutex = bun.threading.Mutex;

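// Editor's note (illustrative sketch, not part of the diff): the idea behind
// the BSSList doc comment above. A comptime-sized static buffer (placed in
// .bss by the linker) serves the first `static_count` appends for free, and
// only overflow touches the heap. All names here are hypothetical.
fn TinyBssList(comptime T: type, comptime static_count: usize) type {
    return struct {
        var backing: [static_count]T = undefined; // zero cost until first use
        used: usize = 0,

        pub fn append(self: *@This(), fallback: std.mem.Allocator, value: T) !*T {
            if (self.used < static_count) {
                backing[self.used] = value;
                self.used += 1;
                return &backing[self.used - 1];
            }
            const ptr = try fallback.create(T); // overflow path
            ptr.* = value;
            return ptr;
        }
    };
}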
@@ -1,6 +1,5 @@
//! AllocationScope wraps another allocator, providing leak and invalid free assertions.
//! It also allows measuring how much memory a scope has allocated.

const AllocationScope = @This();

pub const enabled = bun.Environment.enableAllocScopes;
@@ -216,7 +215,7 @@ pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_add
/// Call when the pointer from `trackExternalAllocation` is freed.
/// Returns true if the free was invalid.
pub fn trackExternalFree(scope: *AllocationScope, slice: anytype, ret_addr: ?usize) bool {
    if (comptime !enabled) return false;
    if (comptime !enabled) return;
    const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) {
        .pointer => |p| switch (p.size) {
            .slice => brk: {
@@ -254,7 +253,6 @@ pub inline fn downcast(a: Allocator) ?*AllocationScope {

const std = @import("std");
const Allocator = std.mem.Allocator;

const bun = @import("bun");
const Output = bun.Output;
const StoredTrace = bun.crash_handler.StoredTrace;

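// Editor's note (illustrative sketch, not part of the diff): how a caller
// consumes the boolean documented above. Note that the hunk's replacement
// line (`return;` in a function declared to return `bool`) would not compile
// as shown; the original `return false;` matches the documented contract.
fn onExternalFree(scope: *AllocationScope, ptr: []const u8) void {
    if (scope.trackExternalFree(ptr, @returnAddress())) {
        bun.Output.debugWarn("invalid external free", .{});
    }
}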
@@ -1,193 +0,0 @@
//! When cloning large amounts of data potentially multiple times, we can
//! leverage copy-on-write memory to avoid actually copying the data. To do that
//! on Linux, we need to use a memfd, which is a Linux-specific feature.
//!
//! The steps are roughly:
//!
//! 1. Create a memfd
//! 2. Write the data to the memfd
//! 3. Map the memfd into memory
//!
//! Then, to clone the data later, we can just call `mmap` again.
//!
//! The big catch is that mmap(), memfd_create(), write() all have overhead. And
//! often we will re-use virtual memory within the process. This does not reuse
//! the virtual memory. So we should only really use this for large blobs of
//! data that we expect to be cloned multiple times. Such as Blob in FormData.

const Self = @This();

const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{});

pub const new = bun.TrivialNew(@This());
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;

ref_count: RefCount,
fd: bun.FileDescriptor = .invalid,
size: usize = 0,

var memfd_counter = std.atomic.Value(usize).init(0);

fn deinit(self: *Self) void {
    self.fd.close();
    bun.destroy(self);
}

pub fn allocator(self: *Self) std.mem.Allocator {
    return .{
        .ptr = self,
        .vtable = AllocatorInterface.VTable,
    };
}

pub fn from(allocator_: std.mem.Allocator) ?*Self {
    if (allocator_.vtable == AllocatorInterface.VTable) {
        return @alignCast(@ptrCast(allocator_.ptr));
    }

    return null;
}

const AllocatorInterface = struct {
    fn alloc(_: *anyopaque, _: usize, _: std.mem.Alignment, _: usize) ?[*]u8 {
        // it should perform no allocations or resizes
        return null;
    }

    fn free(
        ptr: *anyopaque,
        buf: []u8,
        _: std.mem.Alignment,
        _: usize,
    ) void {
        var self: *Self = @alignCast(@ptrCast(ptr));
        defer self.deref();
        bun.sys.munmap(@alignCast(@ptrCast(buf))).unwrap() catch |err| {
            bun.Output.debugWarn("Failed to munmap memfd: {}", .{err});
        };
    }

    pub const VTable = &std.mem.Allocator.VTable{
        .alloc = &AllocatorInterface.alloc,
        .resize = &std.mem.Allocator.noResize,
        .remap = &std.mem.Allocator.noRemap,
        .free = &free,
    };
};

pub fn alloc(self: *Self, len: usize, offset: usize, flags: std.posix.MAP) bun.sys.Maybe(bun.webcore.Blob.Store.Bytes) {
    var size = len;

    // size rounded up to nearest page
    size = std.mem.alignForward(usize, size, std.heap.pageSize());

    var flags_mut = flags;
    flags_mut.TYPE = .SHARED;

    switch (bun.sys.mmap(
        null,
        @min(size, self.size),
        std.posix.PROT.READ | std.posix.PROT.WRITE,
        flags_mut,
        self.fd,
        offset,
    )) {
        .result => |slice| {
            return .{
                .result = bun.webcore.Blob.Store.Bytes{
                    .cap = @truncate(slice.len),
                    .ptr = slice.ptr,
                    .len = @truncate(len),
                    .allocator = self.allocator(),
                },
            };
        },
        .err => |errno| {
            return .{ .err = errno };
        },
    }
}

pub fn shouldUse(bytes: []const u8) bool {
    if (comptime !bun.Environment.isLinux) {
        return false;
    }

    if (bun.jsc.VirtualMachine.is_smol_mode) {
        return bytes.len >= 1024 * 1024 * 1;
    }

    // This is a net 2x - 4x slowdown to new Blob([huge])
    // so we must be careful
    return bytes.len >= 1024 * 1024 * 8;
}

pub fn create(bytes: []const u8) bun.sys.Maybe(bun.webcore.Blob.Store.Bytes) {
    if (comptime !bun.Environment.isLinux) {
        unreachable;
    }

    var label_buf: [128]u8 = undefined;
    const label = std.fmt.bufPrintZ(&label_buf, "memfd-num-{d}", .{memfd_counter.fetchAdd(1, .monotonic)}) catch "";

    // Using huge pages was slower.
    const fd = switch (bun.sys.memfd_create(label, std.os.linux.MFD.CLOEXEC)) {
        .err => |err| return .{ .err = bun.sys.Error.fromCode(err.getErrno(), .open) },
        .result => |fd| fd,
    };

    if (bytes.len > 0)
        // Hint at the size of the file
        _ = bun.sys.ftruncate(fd, @intCast(bytes.len));

    // Dump all the bytes in there
    var written: isize = 0;

    var remain = bytes;
    while (remain.len > 0) {
        switch (bun.sys.pwrite(fd, remain, written)) {
            .err => |err| {
                if (err.getErrno() == .AGAIN) {
                    continue;
                }

                bun.Output.debugWarn("Failed to write to memfd: {}", .{err});
                fd.close();
                return .{ .err = err };
            },
            .result => |result| {
                if (result == 0) {
                    bun.Output.debugWarn("Failed to write to memfd: EOF", .{});
                    fd.close();
                    return .{ .err = bun.sys.Error.fromCode(.NOMEM, .write) };
                }
                written += @intCast(result);
                remain = remain[result..];
            },
        }
    }

    var linux_memfd_allocator = Self.new(.{
        .fd = fd,
        .ref_count = .init(),
        .size = bytes.len,
    });

    switch (linux_memfd_allocator.alloc(bytes.len, 0, .{ .TYPE = .SHARED })) {
        .result => |res| {
            return .{ .result = res };
        },
        .err => |err| {
            linux_memfd_allocator.deref();
            return .{ .err = err };
        },
    }
}

pub fn isInstance(allocator_: std.mem.Allocator) bool {
    return allocator_.vtable == AllocatorInterface.VTable;
}

const bun = @import("bun");
const std = @import("std");
@@ -1,58 +0,0 @@
//! Single allocation only.

const Self = @This();

array_list: std.ArrayListAligned(u8, @alignOf(std.c.max_align_t)),

fn alloc(ptr: *anyopaque, len: usize, alignment: std.mem.Alignment, _: usize) ?[*]u8 {
    bun.assert(alignment.toByteUnits() <= @alignOf(std.c.max_align_t));
    var self = bun.cast(*Self, ptr);
    self.array_list.items.len = 0;
    self.array_list.ensureTotalCapacity(len) catch return null;
    self.array_list.items.len = len;
    return self.array_list.items.ptr;
}

fn resize(_: *anyopaque, buf: []u8, _: std.mem.Alignment, new_len: usize, _: usize) bool {
    _ = new_len;
    _ = buf;
    @panic("not implemented");
}

fn free(
    _: *anyopaque,
    _: []u8,
    _: std.mem.Alignment,
    _: usize,
) void {}

pub fn reset(self: *Self) void {
    self.array_list.items.len = 0;
}

pub fn deinit(self: *Self) void {
    self.array_list.deinit();
}

const vtable = std.mem.Allocator.VTable{
    .alloc = &alloc,
    .free = &free,
    .resize = &resize,
    .remap = &std.mem.Allocator.noRemap,
};

pub fn init(self: *Self, allocator: std.mem.Allocator) std.mem.Allocator {
    self.array_list = .init(allocator);

    return std.mem.Allocator{
        .ptr = self,
        .vtable = &vtable,
    };
}

pub fn isInstance(allocator: std.mem.Allocator) bool {
    return allocator.vtable == &vtable;
}

const bun = @import("bun");
const std = @import("std");
@@ -1,5 +1,4 @@
const MemoryReportingAllocator = @This();

const log = bun.Output.scoped(.MEM, false);

child_allocator: std.mem.Allocator,
@@ -77,10 +76,6 @@ pub inline fn assert(this: *const MemoryReportingAllocator) void {
    }
}

pub fn isInstance(allocator_: std.mem.Allocator) bool {
    return allocator_.vtable == &VTable;
}

pub const VTable = std.mem.Allocator.VTable{
    .alloc = &alloc,
    .resize = &resize,
@@ -89,8 +84,7 @@ pub const VTable = std.mem.Allocator.VTable{
};

const std = @import("std");

const bun = @import("bun");
const jsc = bun.jsc;
const Environment = bun.Environment;
const Output = bun.Output;
const jsc = bun.jsc;

@@ -1,176 +0,0 @@
const Self = @This();

heap: ?*mimalloc.Heap = null,

const log = bun.Output.scoped(.mimalloc, true);

/// Internally, mimalloc calls mi_heap_get_default()
/// to get the default heap.
/// It uses pthread_getspecific to do that.
/// We can save those extra calls if we just do it once in here
pub fn getThreadlocalDefault() Allocator {
    return Allocator{ .ptr = mimalloc.mi_heap_get_default(), .vtable = &c_allocator_vtable };
}

pub fn backingAllocator(self: Self) Allocator {
    var arena = Self{ .heap = self.heap.?.backing() };
    return arena.allocator();
}

pub fn allocator(self: Self) Allocator {
    @setRuntimeSafety(false);
    return Allocator{ .ptr = self.heap.?, .vtable = &c_allocator_vtable };
}

pub fn dumpThreadStats(self: *Self) void {
    _ = self;
    const dump_fn = struct {
        pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
            const text = bun.span(textZ);
            bun.Output.errorWriter().writeAll(text) catch {};
        }
    }.dump;
    mimalloc.mi_thread_stats_print_out(dump_fn, null);
    bun.Output.flush();
}

pub fn dumpStats(self: *Self) void {
    _ = self;
    const dump_fn = struct {
        pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
            const text = bun.span(textZ);
            bun.Output.errorWriter().writeAll(text) catch {};
        }
    }.dump;
    mimalloc.mi_stats_print_out(dump_fn, null);
    bun.Output.flush();
}

pub fn deinit(self: *Self) void {
    mimalloc.mi_heap_destroy(bun.take(&self.heap).?);
}
pub fn init() !Self {
    return .{ .heap = mimalloc.mi_heap_new() orelse return error.OutOfMemory };
}

pub fn gc(self: Self) void {
    mimalloc.mi_heap_collect(self.heap orelse return, false);
}

pub inline fn helpCatchMemoryIssues(self: Self) void {
    if (comptime FeatureFlags.help_catch_memory_issues) {
        self.gc();
        bun.mimalloc.mi_collect(false);
    }
}

pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
    return mimalloc.mi_heap_check_owned(self.heap.?, ptr);
}
pub const supports_posix_memalign = true;

fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: mem.Alignment) ?[*]u8 {
    log("Malloc: {d}\n", .{len});

    const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
        mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
    else
        mimalloc.mi_heap_malloc(heap, len);

    if (comptime Environment.isDebug) {
        const usable = mimalloc.mi_malloc_usable_size(ptr);
        if (usable < len) {
            std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
        }
    }

    return if (ptr) |p|
        @as([*]u8, @ptrCast(p))
    else
        null;
}

fn alignedAllocSize(ptr: [*]u8) usize {
    return mimalloc.mi_malloc_usable_size(ptr);
}

fn alloc(arena: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
    const self = bun.cast(*mimalloc.Heap, arena);

    return alignedAlloc(
        self,
        len,
        alignment,
    );
}

fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
    return mimalloc.mi_expand(buf.ptr, new_len) != null;
}

fn free(
    _: *anyopaque,
    buf: []u8,
    alignment: mem.Alignment,
    _: usize,
) void {
    // mi_free_size internally just asserts the size
    // so it's faster if we don't pass that value through
    // but it's good to have that assertion
    if (comptime Environment.isDebug) {
        assert(mimalloc.mi_is_in_heap_region(buf.ptr));
        if (mimalloc.mustUseAlignedAlloc(alignment))
            mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
        else
            mimalloc.mi_free_size(buf.ptr, buf.len);
    } else {
        mimalloc.mi_free(buf.ptr);
    }
}

/// Attempt to expand or shrink memory, allowing relocation.
///
/// `memory.len` must equal the length requested from the most recent
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
/// equal the same value that was passed as the `alignment` parameter to
/// the original `alloc` call.
///
/// A non-`null` return value indicates the resize was successful. The
/// allocation may have same address, or may have been relocated. In either
/// case, the allocation now has size of `new_len`. A `null` return value
/// indicates that the resize would be equivalent to allocating new memory,
/// copying the bytes from the old memory, and then freeing the old memory.
/// In such case, it is more efficient for the caller to perform the copy.
///
/// `new_len` must be greater than zero.
///
/// `ret_addr` is optionally provided as the first return address of the
/// allocation call stack. If the value is `0` it means no return address
/// has been provided.
fn remap(self: *anyopaque, buf: []u8, alignment: mem.Alignment, new_len: usize, _: usize) ?[*]u8 {
    const aligned_size = alignment.toByteUnits();
    const value = mimalloc.mi_heap_realloc_aligned(@ptrCast(self), buf.ptr, new_len, aligned_size);
    return @ptrCast(value);
}

pub fn isInstance(allocator_: Allocator) bool {
    return allocator_.vtable == &c_allocator_vtable;
}

const c_allocator_vtable = Allocator.VTable{
    .alloc = &Self.alloc,
    .resize = &Self.resize,
    .remap = &Self.remap,
    .free = &Self.free,
};

const Environment = @import("../env.zig");
const FeatureFlags = @import("../feature_flags.zig");
const std = @import("std");

const bun = @import("bun");
const assert = bun.assert;
const mimalloc = bun.mimalloc;

const mem = std.mem;
const Allocator = mem.Allocator;
@@ -1,4 +1,6 @@
//! A nullable allocator the same size as `std.mem.Allocator`.
const std = @import("std");
const bun = @import("bun");

const NullableAllocator = @This();

@@ -30,8 +32,8 @@ pub inline fn get(this: NullableAllocator) ?std.mem.Allocator {
pub fn free(this: *const NullableAllocator, bytes: []const u8) void {
    if (this.get()) |allocator| {
        if (bun.String.isWTFAllocator(allocator)) {
            // avoid calling `std.mem.Allocator.free` as it sets the memory to undefined
            allocator.rawFree(@constCast(bytes), .@"1", 0);
            // workaround for https://github.com/ziglang/zig/issues/4298
            bun.String.StringImplAllocator.free(allocator.ptr, @constCast(bytes), .fromByteUnits(1), 0);
            return;
        }

@@ -44,6 +46,3 @@ comptime {
        @compileError("Expected the sizes to be the same.");
    }
}

const bun = @import("bun");
const std = @import("std");

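// Editor's note (illustrative sketch, not part of the diff): both sides of
// the hunk above avoid `std.mem.Allocator.free`, which memsets the buffer to
// `undefined` in safe builds before dispatching, which is fatal for WTF's
// refcounted string storage. The raw-dispatch idea, assuming Zig 0.14's
// Allocator vtable shape:
fn freeRaw(a: std.mem.Allocator, bytes: []const u8) void {
    // call the vtable directly; no `undefined` poisoning of the buffer
    a.vtable.free(a.ptr, @constCast(bytes), .fromByteUnits(1), @returnAddress());
}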
187
src/allocators/linux_memfd_allocator.zig
Normal file
@@ -0,0 +1,187 @@
const bun = @import("bun");
const std = @import("std");

/// When cloning large amounts of data potentially multiple times, we can
/// leverage copy-on-write memory to avoid actually copying the data. To do that
/// on Linux, we need to use a memfd, which is a Linux-specific feature.
///
/// The steps are roughly:
///
/// 1. Create a memfd
/// 2. Write the data to the memfd
/// 3. Map the memfd into memory
///
/// Then, to clone the data later, we can just call `mmap` again.
///
/// The big catch is that mmap(), memfd_create(), write() all have overhead. And
/// often we will re-use virtual memory within the process. This does not reuse
/// the virtual memory. So we should only really use this for large blobs of
/// data that we expect to be cloned multiple times. Such as Blob in FormData.
pub const LinuxMemFdAllocator = struct {
    const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{});
    pub const new = bun.TrivialNew(@This());
    pub const ref = RefCount.ref;
    pub const deref = RefCount.deref;

    ref_count: RefCount,
    fd: bun.FileDescriptor = .invalid,
    size: usize = 0,

    var memfd_counter = std.atomic.Value(usize).init(0);

    fn deinit(this: *LinuxMemFdAllocator) void {
        this.fd.close();
        bun.destroy(this);
    }

    pub fn allocator(this: *LinuxMemFdAllocator) std.mem.Allocator {
        return .{
            .ptr = this,
            .vtable = AllocatorInterface.VTable,
        };
    }

    pub fn from(allocator_: std.mem.Allocator) ?*LinuxMemFdAllocator {
        if (allocator_.vtable == AllocatorInterface.VTable) {
            return @alignCast(@ptrCast(allocator_.ptr));
        }

        return null;
    }

    const AllocatorInterface = struct {
        fn alloc(_: *anyopaque, _: usize, _: std.mem.Alignment, _: usize) ?[*]u8 {
            // it should perform no allocations or resizes
            return null;
        }

        fn free(
            ptr: *anyopaque,
            buf: []u8,
            _: std.mem.Alignment,
            _: usize,
        ) void {
            var this: *LinuxMemFdAllocator = @alignCast(@ptrCast(ptr));
            defer this.deref();
            bun.sys.munmap(@alignCast(@ptrCast(buf))).unwrap() catch |err| {
                bun.Output.debugWarn("Failed to munmap memfd: {}", .{err});
            };
        }

        pub const VTable = &std.mem.Allocator.VTable{
            .alloc = &AllocatorInterface.alloc,
            .resize = &std.mem.Allocator.noResize,
            .remap = &std.mem.Allocator.noRemap,
            .free = &free,
        };
    };

    pub fn alloc(this: *LinuxMemFdAllocator, len: usize, offset: usize, flags: std.posix.MAP) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) {
        var size = len;

        // size rounded up to nearest page
        size = std.mem.alignForward(usize, size, std.heap.pageSize());

        var flags_mut = flags;
        flags_mut.TYPE = .SHARED;

        switch (bun.sys.mmap(
            null,
            @min(size, this.size),
            std.posix.PROT.READ | std.posix.PROT.WRITE,
            flags_mut,
            this.fd,
            offset,
        )) {
            .result => |slice| {
                return .{
                    .result = bun.webcore.Blob.Store.Bytes{
                        .cap = @truncate(slice.len),
                        .ptr = slice.ptr,
                        .len = @truncate(len),
                        .allocator = this.allocator(),
                    },
                };
            },
            .err => |errno| {
                return .{ .err = errno };
            },
        }
    }

    pub fn shouldUse(bytes: []const u8) bool {
        if (comptime !bun.Environment.isLinux) {
            return false;
        }

        if (bun.JSC.VirtualMachine.is_smol_mode) {
            return bytes.len >= 1024 * 1024 * 1;
        }

        // This is a net 2x - 4x slowdown to new Blob([huge])
        // so we must be careful
        return bytes.len >= 1024 * 1024 * 8;
    }

    pub fn create(bytes: []const u8) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) {
        if (comptime !bun.Environment.isLinux) {
            unreachable;
        }

        var label_buf: [128]u8 = undefined;
        const label = std.fmt.bufPrintZ(&label_buf, "memfd-num-{d}", .{memfd_counter.fetchAdd(1, .monotonic)}) catch "";

        // Using huge pages was slower.
        const fd = switch (bun.sys.memfd_create(label, std.os.linux.MFD.CLOEXEC)) {
            .err => |err| return .{ .err = bun.sys.Error.fromCode(err.getErrno(), .open) },
            .result => |fd| fd,
        };

        if (bytes.len > 0)
            // Hint at the size of the file
            _ = bun.sys.ftruncate(fd, @intCast(bytes.len));

        // Dump all the bytes in there
        var written: isize = 0;

        var remain = bytes;
        while (remain.len > 0) {
            switch (bun.sys.pwrite(fd, remain, written)) {
                .err => |err| {
                    if (err.getErrno() == .AGAIN) {
                        continue;
                    }

                    bun.Output.debugWarn("Failed to write to memfd: {}", .{err});
                    fd.close();
                    return .{ .err = err };
                },
                .result => |result| {
                    if (result == 0) {
                        bun.Output.debugWarn("Failed to write to memfd: EOF", .{});
                        fd.close();
                        return .{ .err = bun.sys.Error.fromCode(.NOMEM, .write) };
                    }
                    written += @intCast(result);
                    remain = remain[result..];
                },
            }
        }

        var linux_memfd_allocator = LinuxMemFdAllocator.new(.{
            .fd = fd,
            .ref_count = .init(),
            .size = bytes.len,
        });

        switch (linux_memfd_allocator.alloc(bytes.len, 0, .{ .TYPE = .SHARED })) {
            .result => |res| {
                return .{ .result = res };
            },
            .err => |err| {
                linux_memfd_allocator.deref();
                return .{ .err = err };
            },
        }
    }
};
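// Editor's note (illustrative sketch, not part of the diff): how a caller
// might use the allocator above, using only the public API this file defines.
fn cloneViaMemfd(bytes: []const u8) ?bun.webcore.Blob.Store.Bytes {
    // Only worthwhile for large buffers; small ones are cheaper to copy.
    if (!LinuxMemFdAllocator.shouldUse(bytes)) return null;
    return switch (LinuxMemFdAllocator.create(bytes)) {
        .result => |b| b, // mmap-backed; freeing the slice munmaps and derefs
        .err => null, // caller falls back to a plain heap copy
    };
}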
53
src/allocators/max_heap_allocator.zig
Normal file
@@ -0,0 +1,53 @@
const bun = @import("bun");
const std = @import("std");

/// Single allocation only.
///
pub const MaxHeapAllocator = struct {
    array_list: std.ArrayListAligned(u8, @alignOf(std.c.max_align_t)),

    fn alloc(ptr: *anyopaque, len: usize, alignment: std.mem.Alignment, _: usize) ?[*]u8 {
        bun.assert(alignment.toByteUnits() <= @alignOf(std.c.max_align_t));
        var this = bun.cast(*MaxHeapAllocator, ptr);
        this.array_list.items.len = 0;
        this.array_list.ensureTotalCapacity(len) catch return null;
        this.array_list.items.len = len;
        return this.array_list.items.ptr;
    }

    fn resize(_: *anyopaque, buf: []u8, _: std.mem.Alignment, new_len: usize, _: usize) bool {
        _ = new_len;
        _ = buf;
        @panic("not implemented");
    }

    fn free(
        _: *anyopaque,
        _: []u8,
        _: std.mem.Alignment,
        _: usize,
    ) void {}

    pub fn reset(this: *MaxHeapAllocator) void {
        this.array_list.items.len = 0;
    }

    pub fn deinit(this: *MaxHeapAllocator) void {
        this.array_list.deinit();
    }

    const vtable = std.mem.Allocator.VTable{
        .alloc = &alloc,
        .free = &free,
        .resize = &resize,
        .remap = &std.mem.Allocator.noRemap,
    };
    pub fn init(this: *MaxHeapAllocator, allocator: std.mem.Allocator) std.mem.Allocator {
        this.array_list = .init(allocator);

        return std.mem.Allocator{
            .ptr = this,
            .vtable = &vtable,
        };
    }
};
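// Editor's note (illustrative sketch, not part of the diff): the "single
// allocation only" contract above in practice: one live buffer at a time,
// with capacity retained across resets.
fn exampleReuse(backing: std.mem.Allocator) void {
    var max_heap: MaxHeapAllocator = undefined;
    const a = max_heap.init(backing);
    defer max_heap.deinit();

    const first = a.alloc(u8, 1024) catch return;
    _ = first;
    max_heap.reset(); // invalidates `first`, keeps the capacity
    const second = a.alloc(u8, 4096) catch return; // reuses and grows the same buffer
    _ = second;
}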
@@ -1,4 +1,11 @@
const mem = @import("std").mem;
const std = @import("std");
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);
const assert = bun.assert;
const Allocator = mem.Allocator;
const mimalloc = @import("./mimalloc.zig");
const Environment = @import("../env.zig");

fn mimalloc_free(
    _: *anyopaque,
@@ -13,7 +20,8 @@ fn mimalloc_free(
    // but it's good to have that assertion
    // let's only enable it in debug mode
    if (comptime Environment.isDebug) {
        if (mimalloc.mustUseAlignedAlloc(alignment))
        assert(mimalloc.mi_is_in_heap_region(buf.ptr));
        if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits()))
            mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
        else
            mimalloc.mi_free_size(buf.ptr, buf.len);
@@ -24,11 +32,12 @@ fn mimalloc_free(

const MimallocAllocator = struct {
    pub const supports_posix_memalign = true;

    fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 {
        if (comptime Environment.enable_logs)
            log("mi_alloc({d}, {d})", .{ len, alignment.toByteUnits() });

        const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
        const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
            mimalloc.mi_malloc_aligned(len, alignment.toByteUnits())
        else
            mimalloc.mi_malloc(len);
@@ -82,7 +91,7 @@ const ZAllocator = struct {
    fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 {
        log("ZAllocator.alignedAlloc: {d}\n", .{len});

        const ptr = if (mimalloc.mustUseAlignedAlloc(alignment))
        const ptr = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
            mimalloc.mi_zalloc_aligned(len, alignment.toByteUnits())
        else
            mimalloc.mi_zalloc(len);
@@ -141,12 +150,3 @@ const z_allocator_vtable = Allocator.VTable{
    .remap = &std.mem.Allocator.noRemap,
    .free = &ZAllocator.free_with_z_allocator,
};

const Environment = @import("../env.zig");
const std = @import("std");

const bun = @import("bun");
const mimalloc = bun.mimalloc;

const mem = @import("std").mem;
const Allocator = mem.Allocator;
@@ -127,14 +127,13 @@ pub extern fn mi_reserve_huge_os_pages_at(pages: usize, numa_node: c_int, timeou
pub extern fn mi_reserve_os_memory(size: usize, commit: bool, allow_large: bool) c_int;
pub extern fn mi_manage_os_memory(start: ?*anyopaque, size: usize, is_committed: bool, is_large: bool, is_zero: bool, numa_node: c_int) bool;
pub extern fn mi_debug_show_arenas() void;
pub const ArenaID = ?*anyopaque;
pub extern fn mi_arena_area(arena_id: ArenaID, size: *usize) ?*anyopaque;
pub const ArenaID = c_int;
pub extern fn mi_arena_area(arena_id: ArenaID, size: [*c]usize) ?*anyopaque;
pub extern fn mi_reserve_huge_os_pages_at_ex(pages: usize, numa_node: c_int, timeout_msecs: usize, exclusive: bool, arena_id: *ArenaID) c_int;
pub extern fn mi_reserve_os_memory_ex(size: usize, commit: bool, allow_large: bool, exclusive: bool, arena_id: *ArenaID) c_int;
pub extern fn mi_manage_os_memory_ex(start: ?*anyopaque, size: usize, is_committed: bool, is_large: bool, is_zero: bool, numa_node: c_int, exclusive: bool, arena_id: *ArenaID) bool;
pub extern fn mi_heap_new_in_arena(arena_id: ArenaID) ?*Heap;
pub extern fn mi_reserve_huge_os_pages(pages: usize, max_secs: f64, pages_reserved: [*c]usize) c_int;
pub extern fn mi_thread_set_in_threadpool() void;
pub const Option = enum(c_uint) {
    show_errors = 0,
    show_stats = 1,
@@ -203,13 +202,12 @@ pub const MI_SMALL_WSIZE_MAX = @as(c_int, 128);
pub const MI_SMALL_SIZE_MAX = MI_SMALL_WSIZE_MAX * @import("std").zig.c_translation.sizeof(?*anyopaque);
pub const MI_ALIGNMENT_MAX = (@as(c_int, 16) * @as(c_int, 1024)) * @as(c_ulong, 1024);

const MI_MAX_ALIGN_SIZE = 16;

pub fn mustUseAlignedAlloc(alignment: std.mem.Alignment) bool {
    return alignment.toByteUnits() > MI_MAX_ALIGN_SIZE;
}

pub const mi_arena_id_t = ?*anyopaque;
pub extern fn mi_heap_new_ex(heap_tag: c_int, allow_destroy: bool, arena_id: mi_arena_id_t) ?*Heap;

const std = @import("std");
pub fn canUseAlignedAlloc(len: usize, alignment: usize) bool {
    return alignment > 0 and std.math.isPowerOfTwo(alignment) and !mi_malloc_satisfies_alignment(alignment, len);
}
const MI_MAX_ALIGN_SIZE = 16;
inline fn mi_malloc_satisfies_alignment(alignment: usize, size: usize) bool {
    return (alignment == @sizeOf(*anyopaque) or
        (alignment == MI_MAX_ALIGN_SIZE and size >= (MI_MAX_ALIGN_SIZE / 2)));
}

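// Editor's note (illustrative sketch, not part of the diff): what the
// replacement helper above routes where. mi_malloc already guarantees
// pointer-size alignment, and 16-byte alignment once the size reaches 8
// bytes, so only the remaining cases need the aligned entry points. The
// first two expectations assume a 64-bit target (@sizeOf(*anyopaque) == 8).
test "canUseAlignedAlloc routing (illustrative)" {
    try std.testing.expect(!canUseAlignedAlloc(64, 8)); // plain mi_malloc suffices
    try std.testing.expect(!canUseAlignedAlloc(64, 16)); // 16-byte, size >= 8
    try std.testing.expect(canUseAlignedAlloc(4, 16)); // small size: not guaranteed
    try std.testing.expect(canUseAlignedAlloc(64, 32)); // over-aligned request
}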
169
src/allocators/mimalloc_arena.zig
Normal file
@@ -0,0 +1,169 @@
const mem = @import("std").mem;
const std = @import("std");

const mimalloc = @import("./mimalloc.zig");
const Environment = @import("../env.zig");
const FeatureFlags = @import("../feature_flags.zig");
const Allocator = mem.Allocator;
const assert = bun.assert;
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);

pub const Arena = struct {
    heap: ?*mimalloc.Heap = null,

    /// Internally, mimalloc calls mi_heap_get_default()
    /// to get the default heap.
    /// It uses pthread_getspecific to do that.
    /// We can save those extra calls if we just do it once in here
    pub fn getThreadlocalDefault() Allocator {
        return Allocator{ .ptr = mimalloc.mi_heap_get_default(), .vtable = &c_allocator_vtable };
    }

    pub fn backingAllocator(this: Arena) Allocator {
        var arena = Arena{ .heap = this.heap.?.backing() };
        return arena.allocator();
    }

    pub fn allocator(this: Arena) Allocator {
        @setRuntimeSafety(false);
        return Allocator{ .ptr = this.heap.?, .vtable = &c_allocator_vtable };
    }

    pub fn dumpThreadStats(_: *Arena) void {
        const dump_fn = struct {
            pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
                const text = bun.span(textZ);
                bun.Output.errorWriter().writeAll(text) catch {};
            }
        }.dump;
        mimalloc.mi_thread_stats_print_out(dump_fn, null);
        bun.Output.flush();
    }

    pub fn dumpStats(_: *Arena) void {
        const dump_fn = struct {
            pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
                const text = bun.span(textZ);
                bun.Output.errorWriter().writeAll(text) catch {};
            }
        }.dump;
        mimalloc.mi_stats_print_out(dump_fn, null);
        bun.Output.flush();
    }

    pub fn deinit(this: *Arena) void {
        mimalloc.mi_heap_destroy(bun.take(&this.heap).?);
    }
    pub fn init() !Arena {
        const arena = Arena{ .heap = mimalloc.mi_heap_new() orelse return error.OutOfMemory };
        return arena;
    }

    pub fn gc(this: Arena) void {
        mimalloc.mi_heap_collect(this.heap orelse return, false);
    }

    pub inline fn helpCatchMemoryIssues(this: Arena) void {
        if (comptime FeatureFlags.help_catch_memory_issues) {
            this.gc();
            bun.Mimalloc.mi_collect(false);
        }
    }

    pub fn ownsPtr(this: Arena, ptr: *const anyopaque) bool {
        return mimalloc.mi_heap_check_owned(this.heap.?, ptr);
    }
    pub const supports_posix_memalign = true;

    fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: mem.Alignment) ?[*]u8 {
        log("Malloc: {d}\n", .{len});

        const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
            mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
        else
            mimalloc.mi_heap_malloc(heap, len);

        if (comptime Environment.isDebug) {
            const usable = mimalloc.mi_malloc_usable_size(ptr);
            if (usable < len) {
                std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
            }
        }

        return if (ptr) |p|
            @as([*]u8, @ptrCast(p))
        else
            null;
    }

    fn alignedAllocSize(ptr: [*]u8) usize {
        return mimalloc.mi_malloc_usable_size(ptr);
    }

    fn alloc(arena: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
        const this = bun.cast(*mimalloc.Heap, arena);

        return alignedAlloc(
            this,
            len,
            alignment,
        );
    }

    fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
        return mimalloc.mi_expand(buf.ptr, new_len) != null;
    }

    fn free(
        _: *anyopaque,
        buf: []u8,
        alignment: mem.Alignment,
        _: usize,
    ) void {
        // mi_free_size internally just asserts the size
        // so it's faster if we don't pass that value through
        // but it's good to have that assertion
        if (comptime Environment.isDebug) {
            assert(mimalloc.mi_is_in_heap_region(buf.ptr));
            if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits()))
                mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
            else
                mimalloc.mi_free_size(buf.ptr, buf.len);
        } else {
            mimalloc.mi_free(buf.ptr);
        }
    }

    /// Attempt to expand or shrink memory, allowing relocation.
    ///
    /// `memory.len` must equal the length requested from the most recent
    /// successful call to `alloc`, `resize`, or `remap`. `alignment` must
    /// equal the same value that was passed as the `alignment` parameter to
    /// the original `alloc` call.
    ///
    /// A non-`null` return value indicates the resize was successful. The
    /// allocation may have same address, or may have been relocated. In either
    /// case, the allocation now has size of `new_len`. A `null` return value
    /// indicates that the resize would be equivalent to allocating new memory,
    /// copying the bytes from the old memory, and then freeing the old memory.
    /// In such case, it is more efficient for the caller to perform the copy.
    ///
    /// `new_len` must be greater than zero.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    fn remap(this: *anyopaque, buf: []u8, alignment: mem.Alignment, new_len: usize, _: usize) ?[*]u8 {
        const aligned_size = alignment.toByteUnits();
        const value = mimalloc.mi_heap_realloc_aligned(@ptrCast(this), buf.ptr, new_len, aligned_size);
        return @ptrCast(value);
    }
};

const c_allocator_vtable = Allocator.VTable{
    .alloc = &Arena.alloc,
    .resize = &Arena.resize,
    .remap = &Arena.remap,
    .free = &Arena.free,
};
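// Editor's note (illustrative sketch, not part of the diff): the remap
// contract documented above, seen from the caller's side; assumes Zig 0.14's
// `std.mem.Allocator.rawRemap`. A null result means "allocate new memory,
// copy, and free the old block yourself".
fn growAllowingRelocation(a: std.mem.Allocator, buf: []u8, new_len: usize) ?[]u8 {
    const p = a.rawRemap(buf, .fromByteUnits(1), new_len, @returnAddress()) orelse return null;
    return p[0..new_len];
}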
@@ -1,3 +1,5 @@
const std = @import("std");

pub const Reader = struct {
    const Self = @This();
    pub const ReadError = error{EOF};
@@ -516,5 +518,3 @@ pub const analytics = struct {
    }
};
};

const std = @import("std");
@@ -1,3 +1,11 @@
const bun = @import("bun");
const Environment = bun.Environment;

const std = @import("std");

const Analytics = @import("./analytics_schema.zig").analytics;
const Semver = bun.Semver;

/// Enables analytics. This is used by:
/// - crash_handler.zig's `report` function to anonymously report crashes
///
@@ -110,7 +118,6 @@ pub const Features = struct {
    pub var csrf_verify: usize = 0;
    pub var csrf_generate: usize = 0;
    pub var unsupported_uv_function: usize = 0;
    pub var exited: usize = 0;

    comptime {
        @export(&napi_module_register, .{ .name = "Bun__napi_module_register_count" });
@@ -255,7 +262,7 @@ pub const EventName = enum(u8) {

var random: std.rand.DefaultPrng = undefined;

const platform_arch = if (Environment.isAarch64) analytics.Architecture.arm else analytics.Architecture.x64;
const platform_arch = if (Environment.isAarch64) Analytics.Architecture.arm else Analytics.Architecture.x64;

// TODO: move this code somewhere more appropriate, and remove it from "analytics"
// The following code is not currently even used for analytics, just feature-detection
@@ -263,10 +270,10 @@ const platform_arch = if (Environment.isAarch64) analytics.Architecture.arm else
pub const GenerateHeader = struct {
    pub const GeneratePlatform = struct {
        var osversion_name: [32]u8 = undefined;
        fn forMac() analytics.Platform {
        fn forMac() Analytics.Platform {
            @memset(&osversion_name, 0);

            var platform = analytics.Platform{ .os = analytics.OperatingSystem.macos, .version = &[_]u8{}, .arch = platform_arch };
            var platform = Analytics.Platform{ .os = Analytics.OperatingSystem.macos, .version = &[_]u8{}, .arch = platform_arch };
            var len = osversion_name.len - 1;
            // this previously used "kern.osrelease", which was the darwin xnu kernel version
            // That is less useful than "kern.osproductversion", which is the macOS version
@@ -277,8 +284,8 @@ pub const GenerateHeader = struct {
        }

        pub var linux_os_name: std.c.utsname = undefined;
        var platform_: analytics.Platform = undefined;
        pub const Platform = analytics.Platform;
        var platform_: Analytics.Platform = undefined;
        pub const Platform = Analytics.Platform;
        var linux_kernel_version: Semver.Version = undefined;
        var run_once = std.once(struct {
            fn run() void {
@@ -293,7 +300,7 @@ pub const GenerateHeader = struct {
                    linux_kernel_version = result.version.min();
                } else if (Environment.isWindows) {
                    platform_ = Platform{
                        .os = analytics.OperatingSystem.windows,
                        .os = Analytics.OperatingSystem.windows,
                        .version = &[_]u8{},
                        .arch = platform_arch,
                    };
@@ -301,7 +308,7 @@ pub const GenerateHeader = struct {
            }
        }.run);

        pub fn forOS() analytics.Platform {
        pub fn forOS() Analytics.Platform {
            run_once.call();
            return platform_;
        }
@@ -351,7 +358,7 @@ pub const GenerateHeader = struct {
        };
    }

    fn forLinux() analytics.Platform {
    fn forLinux() Analytics.Platform {
        linux_os_name = std.mem.zeroes(@TypeOf(linux_os_name));

        _ = std.c.uname(&linux_os_name);
@@ -361,17 +368,10 @@

        // Linux DESKTOP-P4LCIEM 5.10.16.3-microsoft-standard-WSL2 #1 SMP Fri Apr 2 22:23:49 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux
        if (std.mem.indexOf(u8, release, "microsoft") != null) {
            return analytics.Platform{ .os = analytics.OperatingSystem.wsl, .version = release, .arch = platform_arch };
|
||||
return Analytics.Platform{ .os = Analytics.OperatingSystem.wsl, .version = release, .arch = platform_arch };
|
||||
}
|
||||
|
||||
return analytics.Platform{ .os = analytics.OperatingSystem.linux, .version = release, .arch = platform_arch };
|
||||
return Analytics.Platform{ .os = Analytics.OperatingSystem.linux, .version = release, .arch = platform_arch };
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const std = @import("std");
|
||||
const analytics = @import("./analytics/schema.zig").analytics;
|
||||
|
||||
const bun = @import("bun");
|
||||
const Environment = bun.Environment;
|
||||
const Semver = bun.Semver;
|
||||
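The WSL branch above reduces to a substring check on the uname release string; a throwaway sketch of that heuristic (a test of the idea only, not code from this diff):

const std = @import("std");

test "WSL is detected by a substring of the kernel release" {
    // Example release string taken from the comment in forLinux().
    const release = "5.10.16.3-microsoft-standard-WSL2";
    try std.testing.expect(std.mem.indexOf(u8, release, "microsoft") != null);
}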
@@ -1,4 +1,4 @@
package api;
package Api;

smol Loader {
  jsx = 1;
@@ -64,7 +64,7 @@ struct StackTrace {
message JSException {
  string name = 1;
  string message = 2;

  uint16 runtime_type = 3;
  uint8 code = 4;

@@ -103,7 +103,7 @@ message FallbackMessageContainer {
  Problems problems = 4;
  string cwd = 5;
}

smol ResolveMode {
  disable = 1;
@@ -178,18 +178,18 @@ struct JavascriptBundle {
  // These are sorted alphabetically so you can do binary search
  JavascriptBundledModule[] modules;
  JavascriptBundledPackage[] packages;

  // This is ASCII-encoded so you can send it directly over HTTP
  byte[] etag;

  uint32 generated_at;

  // generated by hashing all ${name}@${version} in sorted order
  byte[] app_package_json_dependencies_hash;

  byte[] import_from_name;

  // This is what StringPointer refers to
  byte[] manifest_string;
}

@@ -359,7 +359,7 @@ smol SourceMapMode {
struct FileHandle {
  string path;
  uint size;
  uint fd;
}

message Transform {
@@ -462,7 +462,7 @@ struct Log {

smol Reloader {
  disable = 1;
  // equivalent of CMD + R
  live = 2;
  // React Fast Refresh
  fast_refresh = 3;
@@ -534,7 +534,7 @@ struct WebsocketMessageBuildSuccess {

  Loader loader;
  string module_path;

  // This is the length of the blob that immediately follows this message.
  uint32 blob_length;
}
@@ -630,4 +630,4 @@ struct TestResponseItem {
struct GetTestsResponse {
  TestResponseItem[] tests;
  byte[] contents;
}
}
@@ -1,3 +1,8 @@
const std = @import("std");
const bun = @import("bun");
const js_ast = bun.JSAst;
const OOM = bun.OOM;

pub const Reader = struct {
    const Self = @This();
    pub const ReadError = error{EOF};
@@ -320,7 +325,7 @@ pub fn Writer(comptime WritableStream: type) type {
pub const ByteWriter = Writer(*std.io.FixedBufferStream([]u8));
pub const FileWriter = Writer(std.fs.File);

pub const api = struct {
pub const Api = struct {
    pub const Loader = enum(u8) {
        _none,
        jsx,
@@ -421,7 +426,7 @@ pub const api = struct {
        }
    };

    pub const StackFramePosition = bun.jsc.ZigStackFramePosition;
    pub const StackFramePosition = bun.JSC.ZigStackFramePosition;

    pub const SourceLine = struct {
        /// line
@@ -1951,7 +1956,7 @@ pub const api = struct {

        _,

        pub fn fromJS(global: *bun.jsc.JSGlobalObject, value: bun.jsc.JSValue) bun.JSError!?SourceMapMode {
        pub fn fromJS(global: *bun.JSC.JSGlobalObject, value: bun.JSC.JSValue) bun.JSError!?SourceMapMode {
            if (value.isString()) {
                const str = try value.toSliceOrNull(global);
                defer str.deinit();
@@ -2875,13 +2880,13 @@ pub const api = struct {
            }
        }

        pub fn parseRegistryURLString(this: *Parser, str: *js_ast.E.String) OOM!api.NpmRegistry {
        pub fn parseRegistryURLString(this: *Parser, str: *js_ast.E.String) OOM!Api.NpmRegistry {
            return try this.parseRegistryURLStringImpl(str.data);
        }

        pub fn parseRegistryURLStringImpl(this: *Parser, str: []const u8) OOM!api.NpmRegistry {
        pub fn parseRegistryURLStringImpl(this: *Parser, str: []const u8) OOM!Api.NpmRegistry {
            const url = bun.URL.parse(str);
            var registry = std.mem.zeroes(api.NpmRegistry);
            var registry = std.mem.zeroes(Api.NpmRegistry);

            // Token
            if (url.username.len == 0 and url.password.len > 0) {
@@ -2900,8 +2905,8 @@ pub const api = struct {
            return registry;
        }

        fn parseRegistryObject(this: *Parser, obj: *js_ast.E.Object) !api.NpmRegistry {
            var registry = std.mem.zeroes(api.NpmRegistry);
        fn parseRegistryObject(this: *Parser, obj: *js_ast.E.Object) !Api.NpmRegistry {
            var registry = std.mem.zeroes(Api.NpmRegistry);

            if (obj.get("url")) |url| {
                try this.expectString(url);
@@ -2928,7 +2933,7 @@ pub const api = struct {
            return registry;
        }

        pub fn parseRegistry(this: *Parser, expr: js_ast.Expr) !api.NpmRegistry {
        pub fn parseRegistry(this: *Parser, expr: js_ast.Expr) !Api.NpmRegistry {
            switch (expr.data) {
                .e_string => |str| {
                    return this.parseRegistryURLString(str);
@@ -2938,7 +2943,7 @@ pub const api = struct {
                },
                else => {
                    try this.addError(expr.loc, "Expected registry to be a URL string or an object");
                    return std.mem.zeroes(api.NpmRegistry);
                    return std.mem.zeroes(Api.NpmRegistry);
                },
            }
        }
@@ -3365,9 +3370,3 @@ pub const api = struct {
    }
};
};

const std = @import("std");

const bun = @import("bun");
const OOM = bun.OOM;
const js_ast = bun.ast;
93 src/asan.zig
@@ -1,93 +0,0 @@
/// https://github.com/llvm/llvm-project/blob/main/compiler-rt/include/sanitizer/asan_interface.h
const c = if (bun.Environment.enable_asan) struct {
    extern fn __asan_poison_memory_region(ptr: *const anyopaque, size: usize) void;
    extern fn __asan_unpoison_memory_region(ptr: *const anyopaque, size: usize) void;
    extern fn __asan_address_is_poisoned(ptr: *const anyopaque) bool;
    extern fn __asan_describe_address(ptr: *const anyopaque) void;
    extern fn __asan_update_allocation_context(ptr: *const anyopaque) c_int;

    pub fn poison(ptr: *const anyopaque, size: usize) void {
        __asan_poison_memory_region(ptr, size);
    }
    pub fn unpoison(ptr: *const anyopaque, size: usize) void {
        __asan_unpoison_memory_region(ptr, size);
    }
    pub fn isPoisoned(ptr: *const anyopaque) bool {
        return __asan_address_is_poisoned(ptr);
    }
    pub fn describe(ptr: *const anyopaque) void {
        __asan_describe_address(ptr);
    }
    pub fn updateAllocationContext(ptr: *const anyopaque) c_int {
        return __asan_update_allocation_context(ptr);
    }
} else struct {
    pub fn poison(_: *const anyopaque) void {}
    pub fn unpoison(_: *const anyopaque) void {}
    pub fn isPoisoned(_: *const anyopaque) bool {
        return false;
    }
    pub fn describe(_: *const anyopaque) void {}
    pub fn updateAllocationContext(_: *const anyopaque) c_int {
        return 0;
    }
};

pub const enabled = bun.Environment.enable_asan;

/// Update allocation stack trace for the given allocation to the current stack
/// trace
pub fn updateAllocationContext(ptr: *const anyopaque) bool {
    if (!comptime enabled) return false;
    return c.updateAllocationContext(ptr) == 1;
}

/// Describes an address (prints out where it was allocated, freed, stacktraces,
/// etc.)
pub fn describe(ptr: *const anyopaque) void {
    if (!comptime enabled) return;
    c.describe(ptr);
}

/// Manually poison a memory region
///
/// Useful for making custom allocators asan-aware (for example HiveArray)
///
/// *NOT* threadsafe
pub fn poison(ptr: *const anyopaque, size: usize) void {
    if (!comptime enabled) return;
    c.poison(ptr, size);
}

/// Manually unpoison a memory region
///
/// Useful for making custom allocators asan-aware (for example HiveArray)
///
/// *NOT* threadsafe
pub fn unpoison(ptr: *const anyopaque, size: usize) void {
    if (!comptime enabled) return;
    c.unpoison(ptr, size);
}

fn isPoisoned(ptr: *const anyopaque) bool {
    if (!comptime enabled) return false;
    return c.isPoisoned(ptr);
}

pub fn assertPoisoned(ptr: *const anyopaque) void {
    if (!comptime enabled) return;
    if (!isPoisoned(ptr)) {
        c.describe(ptr);
        @panic("Address is not poisoned");
    }
}

pub fn assertUnpoisoned(ptr: *const anyopaque) void {
    if (!comptime enabled) return;
    if (isPoisoned(ptr)) {
        c.describe(ptr);
        @panic("Address is poisoned");
    }
}

const bun = @import("bun");
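For context, poison/unpoison as deleted above are the usual hooks for making a pool or freelist allocator trap use-after-free under ASan. A minimal sketch (assumed, not from this diff; the single-slot pool and the import path are hypothetical):

const asan = @import("./asan.zig"); // hypothetical import path

// One slot of a toy pool: poisoned while it sits on the free list so
// any stray read or write is reported by AddressSanitizer.
var slot: [64]u8 = undefined;
var slot_is_free = false;

fn releaseSlot() void {
    slot_is_free = true;
    asan.poison(&slot, slot.len);
}

fn acquireSlot() ?[]u8 {
    if (!slot_is_free) return null;
    slot_is_free = false;
    asan.unpoison(&slot, slot.len);
    return slot[0..];
}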
@@ -85,10 +85,12 @@ pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) voi
    this.bump_allocator = this.stack_allocator.get();
}

// @sortImports

const bun = @import("bun");
const std = @import("std");

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const Expr = js_ast.Expr;
const Stmt = js_ast.Stmt;

@@ -112,9 +112,7 @@ pub fn deinit(this: *Ast) void {
    if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}

pub const Class = G.Class;

const string = []const u8;
// @sortImports

const std = @import("std");
const Runtime = @import("../runtime.zig").Runtime;
@@ -123,13 +121,13 @@ const bun = @import("bun");
const BabyList = bun.BabyList;
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const Ast = js_ast.Ast;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Expr = js_ast.Expr;
const G = js_ast.G;
const InlinedEnumValue = js_ast.InlinedEnumValue;
const LocRef = js_ast.LocRef;
const NamedExport = js_ast.NamedExport;
@@ -140,3 +138,6 @@ const RefHashCtx = js_ast.RefHashCtx;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;

const G = js_ast.G;
pub const Class = G.Class;

@@ -87,18 +87,20 @@ pub const B = union(Binding.Tag) {
    }
};

pub const Class = G.Class;
// @sortImports

const std = @import("std");

const bun = @import("bun");
const writeAnyToHasher = bun.writeAnyToHasher;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const ArrayBinding = js_ast.ArrayBinding;
const Binding = js_ast.Binding;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const Flags = js_ast.Flags;
const G = js_ast.G;
const Ref = js_ast.Ref;

const G = js_ast.G;
pub const Class = G.Class;

@@ -147,14 +147,15 @@ pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding
    }
}

const string = []const u8;
// @sortImports

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;

@@ -201,24 +201,18 @@ pub fn addUrlForCss(
    }
}

pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
pub const ConstValuesMap = Ast.ConstValuesMap;
pub const NamedExports = Ast.NamedExports;
pub const NamedImports = Ast.NamedImports;
pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;

const string = []const u8;
// @sortImports

const std = @import("std");

const bun = @import("bun");
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const MimeType = bun.http.MimeType;

const js_ast = bun.ast;
const Ast = js_ast.Ast;
const js_ast = bun.js_ast;
const BundledAst = js_ast.BundledAst;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
@@ -228,3 +222,10 @@ const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
const TlaCheck = js_ast.TlaCheck;

const Ast = js_ast.Ast;
pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
pub const ConstValuesMap = Ast.ConstValuesMap;
pub const NamedExports = Ast.NamedExports;
pub const NamedImports = Ast.NamedImports;
pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;

@@ -124,14 +124,16 @@ pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier
    return minifier;
}

pub const Class = G.Class;
// @sortImports

const string = []const u8;

const bun = @import("bun");
const std = @import("std");

const js_ast = bun.ast;
const bun = @import("bun");
const string = bun.string;

const js_ast = bun.js_ast;
const CharFreq = js_ast.CharFreq;
const G = js_ast.G;
const NameMinifier = js_ast.NameMinifier;

const G = js_ast.G;
pub const Class = G.Class;

@@ -66,9 +66,9 @@ pub const Array = struct {
        return ExprNodeList.init(out[0 .. out.len - remain.len]);
    }

    pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
    pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
        const items = this.items.slice();
        var array = try jsc.JSValue.createEmptyArray(globalObject, items.len);
        var array = try JSC.JSValue.createEmptyArray(globalObject, items.len);
        array.protect();
        defer array.unprotect();
        for (items, 0..) |expr, j| {
@@ -108,8 +108,8 @@ pub const Binary = struct {

pub const Boolean = struct {
    value: bool,
    pub fn toJS(this: @This(), ctx: *jsc.JSGlobalObject) jsc.C.JSValueRef {
        return jsc.C.JSValueMakeBoolean(ctx, this.value);
    pub fn toJS(this: @This(), ctx: *JSC.JSGlobalObject) JSC.C.JSValueRef {
        return JSC.C.JSValueMakeBoolean(ctx, this.value);
    }
};
pub const Super = struct {};
@@ -466,8 +466,8 @@ pub const Number = struct {
        return try writer.write(self.value);
    }

    pub fn toJS(this: @This()) jsc.JSValue {
        return jsc.JSValue.jsNumber(this.value);
    pub fn toJS(this: @This()) JSC.JSValue {
        return JSC.JSValue.jsNumber(this.value);
    }
};

@@ -480,9 +480,9 @@ pub const BigInt = struct {
        return try writer.write(self.value);
    }

    pub fn toJS(_: @This()) jsc.JSValue {
    pub fn toJS(_: @This()) JSC.JSValue {
        // TODO:
        return jsc.JSValue.jsNumber(0);
        return JSC.JSValue.jsNumber(0);
    }
};

@@ -515,8 +515,8 @@ pub const Object = struct {
        return if (asProperty(self, key)) |query| query.expr else @as(?Expr, null);
    }

    pub fn toJS(this: *Object, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
        var obj = jsc.JSValue.createEmptyObject(globalObject, this.properties.len);
    pub fn toJS(this: *Object, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
        var obj = JSC.JSValue.createEmptyObject(globalObject, this.properties.len);
        obj.protect();
        defer obj.unprotect();
        const props: []const G.Property = this.properties.slice();
@@ -955,7 +955,7 @@ pub const String = struct {
        };
    }

    pub fn cloneSliceIfNecessary(str: *const String, allocator: std.mem.Allocator) ![]const u8 {
    pub fn cloneSliceIfNecessary(str: *const String, allocator: std.mem.Allocator) !bun.string {
        if (str.isUTF8()) {
            return allocator.dupe(u8, str.string(allocator) catch unreachable);
        }
@@ -1005,7 +1005,7 @@ pub const String = struct {
                    return strings.utf16EqlString(other.slice16(), s.data);
                }
            },
            []const u8 => {
            bun.string => {
                return strings.eqlLong(s.data, other, true);
            },
            []u16, []const u16 => {
@@ -1024,7 +1024,7 @@ pub const String = struct {
                    return std.mem.eql(u16, other.slice16(), s.slice16());
                }
            },
            []const u8 => {
            bun.string => {
                return strings.utf16EqlString(s.slice16(), other);
            },
            []u16, []const u16 => {
@@ -1055,7 +1055,7 @@ pub const String = struct {
            strings.eqlComptimeUTF16(s.slice16()[0..value.len], value);
    }

    pub fn string(s: *const String, allocator: std.mem.Allocator) OOM![]const u8 {
    pub fn string(s: *const String, allocator: std.mem.Allocator) OOM!bun.string {
        if (s.isUTF8()) {
            return s.data;
        } else {
@@ -1063,7 +1063,7 @@ pub const String = struct {
        }
    }

    pub fn stringZ(s: *const String, allocator: std.mem.Allocator) OOM![:0]const u8 {
    pub fn stringZ(s: *const String, allocator: std.mem.Allocator) OOM!bun.stringZ {
        if (s.isUTF8()) {
            return allocator.dupeZ(u8, s.data);
        } else {
@@ -1071,7 +1071,7 @@ pub const String = struct {
        }
    }

    pub fn stringCloned(s: *const String, allocator: std.mem.Allocator) OOM![]const u8 {
    pub fn stringCloned(s: *const String, allocator: std.mem.Allocator) OOM!bun.string {
        if (s.isUTF8()) {
            return allocator.dupe(u8, s.data);
        } else {
@@ -1091,7 +1091,7 @@ pub const String = struct {
        }
    }

    pub fn toJS(s: *String, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) !jsc.JSValue {
    pub fn toJS(s: *String, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) !JSC.JSValue {
        s.resolveRopeIfNeeded(allocator);
        if (!s.isPresent()) {
            var emp = bun.String.empty;
@@ -1115,11 +1115,11 @@ pub const String = struct {
        }
    }

    pub fn toZigString(s: *String, allocator: std.mem.Allocator) jsc.ZigString {
    pub fn toZigString(s: *String, allocator: std.mem.Allocator) JSC.ZigString {
        if (s.isUTF8()) {
            return jsc.ZigString.fromUTF8(s.slice(allocator));
            return JSC.ZigString.fromUTF8(s.slice(allocator));
        } else {
            return jsc.ZigString.initUTF16(s.slice16());
            return JSC.ZigString.initUTF16(s.slice16());
        }
    }

@@ -1416,10 +1416,7 @@ pub const Import = struct {
    }
};

pub const Class = G.Class;

const string = []const u8;
const stringZ = [:0]const u8;
// @sortImports

const std = @import("std");

@@ -1427,20 +1424,24 @@ const bun = @import("bun");
const ComptimeStringMap = bun.ComptimeStringMap;
const Environment = bun.Environment;
const ImportRecord = bun.ImportRecord;
const JSC = bun.JSC;
const OOM = bun.OOM;
const jsc = bun.jsc;
const logger = bun.logger;
const string = bun.string;
const stringZ = bun.stringZ;
const strings = bun.strings;
const Loader = bun.options.Loader;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;
const G = js_ast.G;
const Op = js_ast.Op;
const OptionalChain = js_ast.OptionalChain;
const Ref = js_ast.Ref;
const ToJSError = js_ast.ToJSError;

const G = js_ast.G;
pub const Class = G.Class;

@@ -64,7 +64,7 @@ pub fn unwrapInlined(expr: Expr) Expr {
}

pub fn fromBlob(
    blob: *const jsc.WebCore.Blob,
    blob: *const JSC.WebCore.Blob,
    allocator: std.mem.Allocator,
    mime_type_: ?MimeType,
    log: *logger.Log,
@@ -96,7 +96,7 @@ pub fn fromBlob(

    if (mime_type.category.isTextLike()) {
        var output = MutableString.initEmpty(allocator);
        try JSPrinter.quoteForJSON(bytes, &output, true);
        output = try JSPrinter.quoteForJSON(bytes, output, true);
        var list = output.toOwnedSlice();
        // remove the quotes
        if (list.len > 0) {
@@ -108,7 +108,7 @@ pub fn fromBlob(
    return Expr.init(
        E.String,
        E.String{
            .data = try jsc.ZigString.init(bytes).toBase64DataURL(allocator),
            .data = try JSC.ZigString.init(bytes).toBase64DataURL(allocator),
        },
        loc,
    );
@@ -147,7 +147,7 @@ pub fn hasAnyPropertyNamed(expr: *const Expr, comptime names: []const string) bo
    return false;
}

pub fn toJS(this: Expr, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
pub fn toJS(this: Expr, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
    return this.data.toJS(allocator, globalObject);
}

@@ -2514,9 +2514,15 @@ pub const Data = union(Tag) {
                e.left.data.writeToHasher(hasher, symbol_table);
                e.right.data.writeToHasher(hasher, symbol_table);
            },
            .e_class => {},
            inline .e_new, .e_call => {},
            .e_function => {},
            .e_class => |e| {
                _ = e; // autofix
            },
            inline .e_new, .e_call => |e| {
                _ = e; // autofix
            },
            .e_function => |e| {
                _ = e; // autofix
            },
            .e_dot => |e| {
                writeAnyToHasher(hasher, .{ e.optional_chain, e.name.len });
                e.target.data.writeToHasher(hasher, symbol_table);
@@ -2527,7 +2533,9 @@ pub const Data = union(Tag) {
                e.target.data.writeToHasher(hasher, symbol_table);
                e.index.data.writeToHasher(hasher, symbol_table);
            },
            .e_arrow => {},
            .e_arrow => |e| {
                _ = e; // autofix
            },
            .e_jsx_element => |e| {
                _ = e; // autofix
            },
@@ -3064,17 +3072,17 @@ pub const Data = union(Tag) {
        return Equality.unknown;
    }

    pub fn toJS(this: Data, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
    pub fn toJS(this: Data, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
        return switch (this) {
            .e_array => |e| e.toJS(allocator, globalObject),
            .e_object => |e| e.toJS(allocator, globalObject),
            .e_string => |e| e.toJS(allocator, globalObject),
            .e_null => jsc.JSValue.null,
            .e_null => JSC.JSValue.null,
            .e_undefined => .js_undefined,
            .e_boolean => |boolean| if (boolean.value)
                jsc.JSValue.true
                JSC.JSValue.true
            else
                jsc.JSValue.false,
                JSC.JSValue.false,
            .e_number => |e| e.toJS(),
            // .e_big_int => |e| e.toJS(ctx, exception),

@@ -3089,7 +3097,7 @@ pub const Data = union(Tag) {
            // brk: {
            // // var node = try allocator.create(Macro.JSNode);
            // // node.* = Macro.JSNode.initExpr(Expr{ .data = this, .loc = logger.Loc.Empty });
            // // break :brk jsc.JSValue.c(Macro.JSNode.Class.make(globalObject, node));
            // // break :brk JSC.JSValue.c(Macro.JSNode.Class.make(globalObject, node));
            // },

            else => {
@@ -3193,16 +3201,17 @@ pub fn StoredData(tag: Tag) type {
    };
}

extern fn JSC__jsToNumber(latin1_ptr: [*]const u8, len: usize) f64;

fn stringToEquivalentNumberValue(str: []const u8) f64 {
    // +"" -> 0
    if (str.len == 0) return 0;
    if (!bun.strings.isAllASCII(str))
        return std.math.nan(f64);
    return bun.cpp.JSC__jsToNumber(str.ptr, str.len);
    return JSC__jsToNumber(str.ptr, str.len);
}

const string = []const u8;
const stringZ = [:0]const u8;
// @sortImports

const JSPrinter = @import("../js_printer.zig");
const std = @import("std");
@@ -3210,17 +3219,19 @@ const std = @import("std");
const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
const JSONParser = bun.json;
const JSC = bun.JSC;
const JSONParser = bun.JSON;
const MutableString = bun.MutableString;
const OOM = bun.OOM;
const default_allocator = bun.default_allocator;
const jsc = bun.jsc;
const logger = bun.logger;
const string = bun.string;
const stringZ = bun.stringZ;
const strings = bun.strings;
const writeAnyToHasher = bun.writeAnyToHasher;
const MimeType = bun.http.MimeType;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const E = js_ast.E;
const Expr = js_ast.Expr;
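A quick sanity check of the two fast paths in stringToEquivalentNumberValue above (a sketch, not part of the diff, assuming it sits in the same file so `std` and the function are in scope):

test "stringToEquivalentNumberValue fast paths" {
    // The empty string coerces to 0 without ever calling into JSC,
    try std.testing.expectEqual(@as(f64, 0), stringToEquivalentNumberValue(""));
    // and any non-ASCII input short-circuits to NaN.
    try std.testing.expect(std.math.isNan(stringToEquivalentNumberValue("héllo")));
}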
@@ -208,16 +208,17 @@ pub const Arg = struct {
    }
};

const string = []const u8;
// @sortImports

const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const logger = bun.logger;
const string = bun.string;
const TypeScript = bun.js_parser.TypeScript;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const BindingNodeIndex = js_ast.BindingNodeIndex;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;

@@ -12,7 +12,7 @@ pub const MacroContext = struct {
    env: *DotEnv.Loader,
    macros: MacroMap,
    remap: MacroRemap,
    javascript_object: jsc.JSValue = jsc.JSValue.zero,
    javascript_object: JSC.JSValue = JSC.JSValue.zero,

    pub fn getRemap(this: MacroContext, path: string) ?MacroRemapEntry {
        if (this.remap.entries.len == 0) return null;
@@ -51,7 +51,7 @@ pub const MacroContext = struct {
        bun.assert(!isMacroPath(import_record_path_without_macro_prefix));

        const input_specifier = brk: {
            if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| {
            if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| {
                break :brk replacement.path;
            }

@@ -176,7 +176,7 @@ pub fn init(
    defer resolver.opts.transform_options = old_transform_options;

    // JSC needs to be initialized if building from CLI
    jsc.initialize(false);
    JSC.initialize(false);

    var _vm = try JavaScript.VirtualMachine.init(.{
        .allocator = default_allocator,
@@ -198,7 +198,7 @@ pub fn init(

    const loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash);

    switch (loaded_result.unwrap(vm.jsc_vm, .leave_unhandled)) {
    switch (loaded_result.unwrap(vm.jsc, .leave_unhandled)) {
        .rejected => |result| {
            vm.unhandledRejection(vm.global, result, loaded_result.asValue());
            vm.disableMacroMode();
@@ -214,17 +214,17 @@ pub fn init(
}

pub const Runner = struct {
    const VisitMap = std.AutoHashMapUnmanaged(jsc.JSValue, Expr);
    const VisitMap = std.AutoHashMapUnmanaged(JSC.JSValue, Expr);

    threadlocal var args_buf: [3]js.JSObjectRef = undefined;
    threadlocal var exception_holder: jsc.ZigException.Holder = undefined;
    threadlocal var exception_holder: JSC.ZigException.Holder = undefined;
    pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError;

    pub const Run = struct {
        caller: Expr,
        function_name: string,
        macro: *const Macro,
        global: *jsc.JSGlobalObject,
        global: *JSC.JSGlobalObject,
        allocator: std.mem.Allocator,
        id: i32,
        log: *logger.Log,
@@ -238,7 +238,7 @@ pub const Runner = struct {
            allocator: std.mem.Allocator,
            function_name: string,
            caller: Expr,
            args: []jsc.JSValue,
            args: []JSC.JSValue,
            source: *const logger.Source,
            id: i32,
        ) MacroError!Expr {
@@ -273,9 +273,9 @@ pub const Runner = struct {

        pub fn run(
            this: *Run,
            value: jsc.JSValue,
            value: JSC.JSValue,
        ) MacroError!Expr {
            return switch ((try jsc.ConsoleObject.Formatter.Tag.get(value, this.global)).tag) {
            return switch ((try JSC.ConsoleObject.Formatter.Tag.get(value, this.global)).tag) {
                .Error => this.coerce(value, .Error),
                .Undefined => this.coerce(value, .Undefined),
                .Null => this.coerce(value, .Null),
@@ -305,8 +305,8 @@ pub const Runner = struct {

        pub fn coerce(
            this: *Run,
            value: jsc.JSValue,
            comptime tag: jsc.ConsoleObject.Formatter.Tag,
            value: JSC.JSValue,
            comptime tag: JSC.ConsoleObject.Formatter.Tag,
        ) MacroError!Expr {
            switch (comptime tag) {
                .Error => {
@@ -325,15 +325,15 @@ pub const Runner = struct {
                return _entry.value_ptr.*;
            }

            var blob_: ?jsc.WebCore.Blob = null;
            var blob_: ?JSC.WebCore.Blob = null;
            const mime_type: ?MimeType = null;

            if (value.jsType() == .DOMWrapper) {
                if (value.as(jsc.WebCore.Response)) |resp| {
                if (value.as(JSC.WebCore.Response)) |resp| {
                    return this.run(try resp.getBlobWithoutCallFrame(this.global));
                } else if (value.as(jsc.WebCore.Request)) |resp| {
                } else if (value.as(JSC.WebCore.Request)) |resp| {
                    return this.run(try resp.getBlobWithoutCallFrame(this.global));
                } else if (value.as(jsc.WebCore.Blob)) |resp| {
                } else if (value.as(JSC.WebCore.Blob)) |resp| {
                    blob_ = resp.*;
                    blob_.?.allocator = null;
                } else if (value.as(bun.api.ResolveMessage) != null or value.as(bun.api.BuildMessage) != null) {
@@ -366,7 +366,7 @@ pub const Runner = struct {
                .Boolean => {
                    return Expr{ .data = .{ .e_boolean = .{ .value = value.toBoolean() } }, .loc = this.caller.loc };
                },
                jsc.ConsoleObject.Formatter.Tag.Array => {
                JSC.ConsoleObject.Formatter.Tag.Array => {
                    this.is_top_level = false;

                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
@@ -381,7 +381,7 @@ pub const Runner = struct {
                        return _entry.value_ptr.*;
                    }

                    var iter = try jsc.JSArrayIterator.init(value, this.global);
                    var iter = try JSC.JSArrayIterator.init(value, this.global);
                    if (iter.len == 0) {
                        const result = Expr.init(
                            E.Array,
@@ -418,7 +418,7 @@ pub const Runner = struct {
                    return out;
                },
                // TODO: optimize this
                jsc.ConsoleObject.Formatter.Tag.Object => {
                JSC.ConsoleObject.Formatter.Tag.Object => {
                    this.is_top_level = false;
                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
                    if (_entry.found_existing) {
@@ -433,7 +433,7 @@ pub const Runner = struct {
                    }
                    // SAFETY: tag ensures `value` is an object.
                    const obj = value.getObject() orelse unreachable;
                    var object_iter = try jsc.JSPropertyIterator(.{
                    var object_iter = try JSC.JSPropertyIterator(.{
                        .skip_empty_name = false,
                        .include_value = true,
                    }).init(this.global, obj);
@@ -466,7 +466,7 @@ pub const Runner = struct {
                // if (console_tag.cell == .JSDate) {
                //     // in the code for printing dates, it never exceeds this amount
                //     var iso_string_buf = this.allocator.alloc(u8, 36) catch unreachable;
                //     var str = jsc.ZigString.init("");
                //     var str = JSC.ZigString.init("");
                //     value.jsonStringify(this.global, 0, &str);
                //     var out_buf: []const u8 = std.fmt.bufPrint(iso_string_buf, "{}", .{str}) catch "";
                //     if (out_buf.len > 2) {
@@ -502,8 +502,8 @@ pub const Runner = struct {

            this.macro.vm.waitForPromise(promise);

            const promise_result = promise.result(this.macro.vm.jsc_vm);
            const rejected = promise.status(this.macro.vm.jsc_vm) == .rejected;
            const promise_result = promise.result(this.macro.vm.jsc);
            const rejected = promise.status(this.macro.vm.jsc) == .rejected;

            if (promise_result.isUndefined() and this.is_top_level) {
                this.is_top_level = false;
@@ -542,12 +542,12 @@ pub const Runner = struct {
        caller: Expr,
        source: *const logger.Source,
        id: i32,
        javascript_object: jsc.JSValue,
        javascript_object: JSC.JSValue,
    ) MacroError!Expr {
        if (comptime Environment.isDebug) Output.prettyln("<r><d>[macro]<r> call <d><b>{s}<r>", .{function_name});

        exception_holder = jsc.ZigException.Holder.init();
        var js_args: []jsc.JSValue = &.{};
        exception_holder = JSC.ZigException.Holder.init();
        var js_args: []JSC.JSValue = &.{};
        var js_processed_args_len: usize = 0;
        defer {
            for (js_args[0..js_processed_args_len -| @as(usize, @intFromBool(javascript_object != .zero))]) |arg| {
@@ -557,12 +557,12 @@ pub const Runner = struct {
            allocator.free(js_args);
        }

        const globalObject = jsc.VirtualMachine.get().global;
        const globalObject = JSC.VirtualMachine.get().global;

        switch (caller.data) {
            .e_call => |call| {
                const call_args: []Expr = call.args.slice();
                js_args = try allocator.alloc(jsc.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero)));
                js_args = try allocator.alloc(JSC.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero)));
                js_processed_args_len = js_args.len;

                for (0.., call_args, js_args[0..call_args.len]) |i, in, *out| {
@@ -589,7 +589,7 @@ pub const Runner = struct {

        if (javascript_object != .zero) {
            if (js_args.len == 0) {
                js_args = try allocator.alloc(jsc.JSValue, 1);
                js_args = try allocator.alloc(JSC.JSValue, 1);
            }

            js_args[js_args.len - 1] = javascript_object;
@@ -601,9 +601,9 @@ pub const Runner = struct {
        threadlocal var call_args: CallArgs = undefined;
        threadlocal var result: MacroError!Expr = undefined;
        pub fn callWrapper(args: CallArgs) MacroError!Expr {
            jsc.markBinding(@src());
            JSC.markBinding(@src());
            call_args = args;
            Bun__startMacro(&call, jsc.VirtualMachine.get().global);
            Bun__startMacro(&call, JSC.VirtualMachine.get().global);
            return result;
        }

@@ -631,7 +631,7 @@ pub const Runner = struct {
    extern "c" fn Bun__startMacro(function: *const anyopaque, *anyopaque) void;
};

const string = []const u8;
// @sortImports

const DotEnv = @import("../env_loader.zig");
const std = @import("std");
@@ -650,12 +650,17 @@ const Output = bun.Output;
const Transpiler = bun.Transpiler;
const default_allocator = bun.default_allocator;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const Loader = bun.options.Loader;
const MimeType = bun.http.MimeType;
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;

const js_ast = bun.ast;
const JSC = bun.JSC;
const JavaScript = bun.JSC;
const js = bun.JSC.C;

const js_ast = bun.js_ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
@@ -664,7 +669,3 @@ const Macro = js_ast.Macro;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const ToJSError = js_ast.ToJSError;

const JavaScript = bun.jsc;
const jsc = bun.jsc;
const js = bun.jsc.C;

@@ -164,6 +164,8 @@ pub fn NewStore(comptime types: []const type, comptime count: usize) type {
    };
}

// @sortImports

const std = @import("std");

const bun = @import("bun");

@@ -281,11 +281,13 @@ pub const Table = brk: {
    break :brk table;
};

const string = []const u8;
// @sortImports

const bun = @import("bun");
const std = @import("std");

const js_ast = bun.ast;
const bun = @import("bun");
const string = bun.string;

const js_ast = bun.js_ast;
const AssignTarget = js_ast.AssignTarget;
const Op = js_ast.Op;

@@ -212,12 +212,13 @@ pub const Continue = struct {
    label: ?LocRef = null,
};

const string = []const u8;
// @sortImports

const bun = @import("bun");
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.ast;
const js_ast = bun.js_ast;
const Case = js_ast.Case;
const Catch = js_ast.Catch;
const ClauseItem = js_ast.ClauseItem;
Some files were not shown because too many files have changed in this diff.