mirror of
https://github.com/oven-sh/bun
synced 2026-02-02 23:18:47 +00:00
Compare commits
2 Commits
ciro/fix-a
...
zack/fix-c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
eec0b57e65 | ||
|
|
0094e15f80 |
@@ -127,11 +127,8 @@ const testPlatforms = [
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
|
||||
@@ -569,7 +566,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
|
||||
timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
|
||||
timeout_in_minutes: profile === "asan" ? 45 : 30,
|
||||
command:
|
||||
os === "windows"
|
||||
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
|
||||
|
||||
@@ -360,8 +360,9 @@ JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject *
|
||||
|
||||
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
structure = InternalFunction::createSubclassStructure(
|
||||
globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
|
||||
scope.release();
|
||||
}
|
||||
|
||||
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
|
||||
|
||||
30
.github/workflows/format.yml
vendored
30
.github/workflows/format.yml
vendored
@@ -1,30 +1,28 @@
|
||||
name: autofix.ci
|
||||
name: format
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
push:
|
||||
branches: ["main"]
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
jobs:
|
||||
autofix:
|
||||
format:
|
||||
name: Format
|
||||
runs-on: ubuntu-latest
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global core.autocrlf true
|
||||
@@ -46,13 +44,25 @@ jobs:
|
||||
version: 0.14.0
|
||||
- name: Zig Format
|
||||
run: |
|
||||
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
|
||||
zig fmt src
|
||||
./scripts/sort-imports.ts src
|
||||
bun scripts/sortImports src
|
||||
zig fmt src
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run zig-format`"
|
||||
- name: Prettier Format
|
||||
run: |
|
||||
bun run prettier
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run prettier`"
|
||||
- name: Clang Format
|
||||
run: |
|
||||
bun run clang-format
|
||||
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run clang-format`"
|
||||
|
||||
32
.github/workflows/run-lint.yml
vendored
Normal file
32
.github/workflows/run-lint.yml
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
name: Lint
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
env:
|
||||
BUN_VERSION: "1.2.0"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
text_output: ${{ steps.lint.outputs.text_output }}
|
||||
json_output: ${{ steps.lint.outputs.json_output }}
|
||||
count: ${{ steps.lint.outputs.count }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
bun --cwd=packages/bun-internal-test install
|
||||
- name: Lint
|
||||
id: lint
|
||||
run: |
|
||||
bun packages/bun-internal-test/src/linter.ts
|
||||
99
.github/workflows/update-hdrhistogram.yml
vendored
99
.github/workflows/update-hdrhistogram.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update hdrhistogram
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check hdrhistogram version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHdrHistogram.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHdrHistogram.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHdrHistogram.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/HdrHistogram/HdrHistogram_c/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHdrHistogram.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHdrHistogram.cmake
|
||||
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates hdrhistogram to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
|
||||
99
.github/workflows/update-highway.yml
vendored
99
.github/workflows/update-highway.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update highway
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check highway version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHighway.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHighway.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHighway.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/google/highway/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHighway.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHighway.cmake
|
||||
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates highway to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
|
||||
47
.github/workflows/vscode-release.yml
vendored
47
.github/workflows/vscode-release.yml
vendored
@@ -1,47 +0,0 @@
|
||||
name: VSCode Extension Publish
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: "Publish to VS Code Marketplace"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.18"
|
||||
|
||||
- name: Install dependencies (root)
|
||||
run: bun install
|
||||
|
||||
- name: Install dependencies
|
||||
run: bun install
|
||||
working-directory: packages/bun-vscode
|
||||
|
||||
- name: Set Version
|
||||
run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
|
||||
working-directory: packages/bun-vscode
|
||||
|
||||
- name: Build (inspector protocol)
|
||||
run: bun install && bun run build
|
||||
working-directory: packages/bun-inspector-protocol
|
||||
|
||||
- name: Build (vscode extension)
|
||||
run: bun run build
|
||||
working-directory: packages/bun-vscode
|
||||
|
||||
- name: Publish
|
||||
if: success()
|
||||
run: bunx vsce publish
|
||||
env:
|
||||
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
|
||||
working-directory: packages/bun-vscode/extension
|
||||
1
.vscode/settings.json
vendored
1
.vscode/settings.json
vendored
@@ -168,5 +168,4 @@
|
||||
"WebKit/WebInspectorUI": true,
|
||||
},
|
||||
"git.detectSubmodules": false,
|
||||
// "bun.test.customScript": "./build/debug/bun-debug test"
|
||||
}
|
||||
|
||||
@@ -4,9 +4,9 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
### Build Commands
|
||||
|
||||
- **Build debug version**: `bun bd`
|
||||
- **Build debug version**: `bun bd` or `bun run build:debug`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- Compilation takes ~5 minutes. Don't timeout, be patient.
|
||||
- Compilation takes ~2.5 minutes
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
|
||||
@@ -160,7 +160,6 @@ In particular, these are:
|
||||
|
||||
- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
|
||||
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
|
||||
- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
|
||||
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
|
||||
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
|
||||
|
||||
|
||||
Binary file not shown.
@@ -28,7 +28,9 @@ if (+(existingUsers?.[0]?.count ?? existingUsers?.count) < 100) {
|
||||
}));
|
||||
|
||||
// Insert all users
|
||||
await sql`INSERT INTO users_bun_bench ${sql(users)}`;
|
||||
await sql`
|
||||
INSERT INTO users_bun_bench (first_name, last_name, email, dob) ${sql(users)}
|
||||
`;
|
||||
}
|
||||
|
||||
const type = isBun ? "Bun.sql" : "postgres";
|
||||
|
||||
@@ -9,6 +9,6 @@
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"postgres": "^3.4.7"
|
||||
"postgres": "^3.4.5"
|
||||
}
|
||||
}
|
||||
@@ -752,13 +752,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
});
|
||||
}
|
||||
}
|
||||
{
|
||||
const cppImport = b.createModule(.{
|
||||
.root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "cpp.zig"),
|
||||
});
|
||||
mod.addImport("cpp", cppImport);
|
||||
cppImport.addImport("bun", mod);
|
||||
}
|
||||
inline for (.{
|
||||
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
|
||||
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
|
||||
|
||||
10
bun.lock
10
bun.lock
@@ -4,8 +4,6 @@
|
||||
"": {
|
||||
"name": "bun",
|
||||
"devDependencies": {
|
||||
"@lezer/common": "^1.2.3",
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
@@ -89,14 +87,6 @@
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
|
||||
|
||||
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
|
||||
|
||||
"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],
|
||||
|
||||
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
|
||||
|
||||
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
|
||||
|
||||
"@types/bun": ["@types/bun@workspace:packages/@types/bun"],
|
||||
|
||||
"@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],
|
||||
|
||||
@@ -7,6 +7,3 @@
|
||||
# Instead, we can only scan the test directory for Bun's runtime tests
|
||||
root = "test"
|
||||
preload = "./test/preload.ts"
|
||||
|
||||
[install]
|
||||
linker = "isolated"
|
||||
|
||||
@@ -95,7 +95,7 @@ if(LINUX)
|
||||
optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF)
|
||||
endif()
|
||||
|
||||
if(DEBUG AND ((APPLE AND ARCH STREQUAL "aarch64") OR LINUX))
|
||||
if(DEBUG AND APPLE AND ARCH STREQUAL "aarch64")
|
||||
set(DEFAULT_ASAN ON)
|
||||
else()
|
||||
set(DEFAULT_ASAN OFF)
|
||||
@@ -139,10 +139,10 @@ endif()
|
||||
optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION})
|
||||
|
||||
# Used in process.version, process.versions.node, napi, and elsewhere
|
||||
setx(NODEJS_VERSION "24.3.0")
|
||||
optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "24.3.0")
|
||||
|
||||
# Used in process.versions.modules and compared while loading V8 modules
|
||||
setx(NODEJS_ABI_VERSION "137")
|
||||
optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "137")
|
||||
|
||||
if(APPLE)
|
||||
set(DEFAULT_STATIC_SQLITE OFF)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
src/bake/bake.bind.ts
|
||||
src/bake/bake.d.ts
|
||||
src/bake/bake.private.d.ts
|
||||
src/bake/bun-framework-react/index.ts
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
src/bake.bind.ts
|
||||
src/bake/bake.bind.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bun.js/api/BunObject.bind.ts
|
||||
src/bun.js/bindgen_test.bind.ts
|
||||
|
||||
@@ -8,14 +8,11 @@ src/codegen/bundle-functions.ts
|
||||
src/codegen/bundle-modules.ts
|
||||
src/codegen/class-definitions.ts
|
||||
src/codegen/client-js.ts
|
||||
src/codegen/cppbind.ts
|
||||
src/codegen/create-hash-table.ts
|
||||
src/codegen/generate-classes.ts
|
||||
src/codegen/generate-compact-string-table.ts
|
||||
src/codegen/generate-js2native.ts
|
||||
src/codegen/generate-jssink.ts
|
||||
src/codegen/generate-node-errors.ts
|
||||
src/codegen/helpers.ts
|
||||
src/codegen/internal-module-registry-scanner.ts
|
||||
src/codegen/replacements.ts
|
||||
src/codegen/shared-types.ts
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
src/allocators.zig
|
||||
src/allocators/AllocationScope.zig
|
||||
src/allocators/basic.zig
|
||||
src/allocators/LinuxMemFdAllocator.zig
|
||||
src/allocators/MaxHeapAllocator.zig
|
||||
src/allocators/linux_memfd_allocator.zig
|
||||
src/allocators/max_heap_allocator.zig
|
||||
src/allocators/memory_allocator.zig
|
||||
src/allocators/MemoryReportingAllocator.zig
|
||||
src/allocators/mimalloc_arena.zig
|
||||
src/allocators/mimalloc.zig
|
||||
src/allocators/MimallocArena.zig
|
||||
src/allocators/NullableAllocator.zig
|
||||
src/analytics.zig
|
||||
src/analytics/schema.zig
|
||||
src/analytics/analytics_schema.zig
|
||||
src/analytics/analytics_thread.zig
|
||||
src/api/schema.zig
|
||||
src/ast.zig
|
||||
src/ast/Ast.zig
|
||||
src/ast/ASTMemoryAllocator.zig
|
||||
src/ast/B.zig
|
||||
@@ -34,30 +33,20 @@ src/ast/UseDirective.zig
|
||||
src/async/posix_event_loop.zig
|
||||
src/async/stub_event_loop.zig
|
||||
src/async/windows_event_loop.zig
|
||||
src/bake.zig
|
||||
src/baby_list.zig
|
||||
src/bake/bake.zig
|
||||
src/bake/DevServer.zig
|
||||
src/bake/DevServer/Assets.zig
|
||||
src/bake/DevServer/DirectoryWatchStore.zig
|
||||
src/bake/DevServer/ErrorReportRequest.zig
|
||||
src/bake/DevServer/HmrSocket.zig
|
||||
src/bake/DevServer/HotReloadEvent.zig
|
||||
src/bake/DevServer/IncrementalGraph.zig
|
||||
src/bake/DevServer/memory_cost.zig
|
||||
src/bake/DevServer/PackedMap.zig
|
||||
src/bake/DevServer/RouteBundle.zig
|
||||
src/bake/DevServer/SerializedFailure.zig
|
||||
src/bake/DevServer/SourceMapStore.zig
|
||||
src/bake/DevServer/WatcherAtomics.zig
|
||||
src/bake/FrameworkRouter.zig
|
||||
src/bake/production.zig
|
||||
src/base64/base64.zig
|
||||
src/bit_set.zig
|
||||
src/bits.zig
|
||||
src/boringssl.zig
|
||||
src/brotli.zig
|
||||
src/btjs.zig
|
||||
src/bun.js.zig
|
||||
src/bun_js.zig
|
||||
src/bun.js/api.zig
|
||||
src/bun.js/api/bun/dns.zig
|
||||
src/bun.js/api/bun/dns_resolver.zig
|
||||
src/bun.js/api/bun/h2_frame_parser.zig
|
||||
src/bun.js/api/bun/lshpack.zig
|
||||
src/bun.js/api/bun/process.zig
|
||||
@@ -107,7 +96,6 @@ src/bun.js/api/Timer/EventLoopTimer.zig
|
||||
src/bun.js/api/Timer/ImmediateObject.zig
|
||||
src/bun.js/api/Timer/TimeoutObject.zig
|
||||
src/bun.js/api/Timer/TimerObjectInternals.zig
|
||||
src/bun.js/api/Timer/WTFTimer.zig
|
||||
src/bun.js/api/TOMLObject.zig
|
||||
src/bun.js/api/UnsafeObject.zig
|
||||
src/bun.js/bindgen_test.zig
|
||||
@@ -254,6 +242,7 @@ src/bun.js/test/jest.zig
|
||||
src/bun.js/test/pretty_format.zig
|
||||
src/bun.js/test/snapshot.zig
|
||||
src/bun.js/test/test.zig
|
||||
src/bun.js/unbounded_queue.zig
|
||||
src/bun.js/uuid.zig
|
||||
src/bun.js/virtual_machine_exports.zig
|
||||
src/bun.js/VirtualMachine.zig
|
||||
@@ -292,6 +281,7 @@ src/bun.js/webcore/streams.zig
|
||||
src/bun.js/webcore/TextDecoder.zig
|
||||
src/bun.js/webcore/TextEncoder.zig
|
||||
src/bun.js/webcore/TextEncoderStreamEncoder.zig
|
||||
src/bun.js/WTFTimer.zig
|
||||
src/bun.zig
|
||||
src/bundler/AstBuilder.zig
|
||||
src/bundler/bundle_v2.zig
|
||||
@@ -315,14 +305,12 @@ src/bundler/linker_context/generateCodeForLazyExport.zig
|
||||
src/bundler/linker_context/generateCompileResultForCssChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForJSChunk.zig
|
||||
src/bundler/linker_context/OutputFileListBuilder.zig
|
||||
src/bundler/linker_context/postProcessCSSChunk.zig
|
||||
src/bundler/linker_context/postProcessHTMLChunk.zig
|
||||
src/bundler/linker_context/postProcessJSChunk.zig
|
||||
src/bundler/linker_context/prepareCssAstsForChunk.zig
|
||||
src/bundler/linker_context/renameSymbolsInChunk.zig
|
||||
src/bundler/linker_context/scanImportsAndExports.zig
|
||||
src/bundler/linker_context/StaticRouteVisitor.zig
|
||||
src/bundler/linker_context/writeOutputFilesToDisk.zig
|
||||
src/bundler/LinkerContext.zig
|
||||
src/bundler/LinkerGraph.zig
|
||||
@@ -355,11 +343,9 @@ src/cli/pack_command.zig
|
||||
src/cli/package_manager_command.zig
|
||||
src/cli/patch_command.zig
|
||||
src/cli/patch_commit_command.zig
|
||||
src/cli/pm_pkg_command.zig
|
||||
src/cli/pm_trusted_command.zig
|
||||
src/cli/pm_version_command.zig
|
||||
src/cli/pm_view_command.zig
|
||||
src/cli/pm_why_command.zig
|
||||
src/cli/publish_command.zig
|
||||
src/cli/remove_command.zig
|
||||
src/cli/run_command.zig
|
||||
@@ -368,15 +354,8 @@ src/cli/test_command.zig
|
||||
src/cli/test/Scanner.zig
|
||||
src/cli/unlink_command.zig
|
||||
src/cli/update_command.zig
|
||||
src/cli/update_interactive_command.zig
|
||||
src/cli/upgrade_command.zig
|
||||
src/cli/why_command.zig
|
||||
src/codegen/process_windows_translate_c.zig
|
||||
src/collections.zig
|
||||
src/collections/baby_list.zig
|
||||
src/collections/bit_set.zig
|
||||
src/collections/hive_array.zig
|
||||
src/collections/multi_array_list.zig
|
||||
src/compile_target.zig
|
||||
src/comptime_string_map.zig
|
||||
src/copy_file.zig
|
||||
@@ -525,19 +504,23 @@ src/env.zig
|
||||
src/errno/darwin_errno.zig
|
||||
src/errno/linux_errno.zig
|
||||
src/errno/windows_errno.zig
|
||||
src/exact_size_matcher.zig
|
||||
src/fd.zig
|
||||
src/feature_flags.zig
|
||||
src/fmt.zig
|
||||
src/fs.zig
|
||||
src/fs/stat_hash.zig
|
||||
src/futex.zig
|
||||
src/generated_perf_trace_events.zig
|
||||
src/generated_versions_list.zig
|
||||
src/glob.zig
|
||||
src/glob/GlobWalker.zig
|
||||
src/glob/match.zig
|
||||
src/Global.zig
|
||||
src/grapheme.zig
|
||||
src/heap_breakdown.zig
|
||||
src/highway.zig
|
||||
src/hive_array.zig
|
||||
src/hmac.zig
|
||||
src/HTMLScanner.zig
|
||||
src/http.zig
|
||||
@@ -555,7 +538,6 @@ src/http/HTTPThread.zig
|
||||
src/http/InitError.zig
|
||||
src/http/InternalState.zig
|
||||
src/http/Method.zig
|
||||
src/http/mime_type_list_enum.zig
|
||||
src/http/MimeType.zig
|
||||
src/http/ProxyTunnel.zig
|
||||
src/http/SendFile.zig
|
||||
@@ -581,7 +563,6 @@ src/install/install_binding.zig
|
||||
src/install/install.zig
|
||||
src/install/integrity.zig
|
||||
src/install/isolated_install.zig
|
||||
src/install/isolated_install/FileCopier.zig
|
||||
src/install/isolated_install/Hardlinker.zig
|
||||
src/install/isolated_install/Installer.zig
|
||||
src/install/isolated_install/Store.zig
|
||||
@@ -632,10 +613,6 @@ src/install/resolvers/folder_resolver.zig
|
||||
src/install/versioned_url.zig
|
||||
src/install/windows-shim/BinLinkingShim.zig
|
||||
src/install/windows-shim/bun_shim_impl.zig
|
||||
src/interchange.zig
|
||||
src/interchange/json.zig
|
||||
src/interchange/toml.zig
|
||||
src/interchange/toml/lexer.zig
|
||||
src/io/heap.zig
|
||||
src/io/io.zig
|
||||
src/io/MaxBuf.zig
|
||||
@@ -644,12 +621,14 @@ src/io/PipeReader.zig
|
||||
src/io/pipes.zig
|
||||
src/io/PipeWriter.zig
|
||||
src/io/source.zig
|
||||
src/js_ast.zig
|
||||
src/js_lexer_tables.zig
|
||||
src/js_lexer.zig
|
||||
src/js_lexer/identifier.zig
|
||||
src/js_parser.zig
|
||||
src/js_printer.zig
|
||||
src/jsc_stub.zig
|
||||
src/json_parser.zig
|
||||
src/libarchive/libarchive-bindings.zig
|
||||
src/libarchive/libarchive.zig
|
||||
src/linear_fifo.zig
|
||||
@@ -661,6 +640,8 @@ src/main_test.zig
|
||||
src/main_wasm.zig
|
||||
src/main.zig
|
||||
src/meta.zig
|
||||
src/multi_array_list.zig
|
||||
src/Mutex.zig
|
||||
src/napi/napi.zig
|
||||
src/node_fallbacks.zig
|
||||
src/open.zig
|
||||
@@ -672,7 +653,6 @@ src/paths.zig
|
||||
src/paths/EnvPath.zig
|
||||
src/paths/path_buffer_pool.zig
|
||||
src/paths/Path.zig
|
||||
src/pe.zig
|
||||
src/perf.zig
|
||||
src/pool.zig
|
||||
src/Progress.zig
|
||||
@@ -835,36 +815,30 @@ src/sql/postgres/types/PostgresString.zig
|
||||
src/sql/postgres/types/Tag.zig
|
||||
src/StandaloneModuleGraph.zig
|
||||
src/StaticHashMap.zig
|
||||
src/string_immutable.zig
|
||||
src/string_types.zig
|
||||
src/string.zig
|
||||
src/string/escapeHTML.zig
|
||||
src/string/HashedString.zig
|
||||
src/string/immutable.zig
|
||||
src/string/immutable/escapeHTML.zig
|
||||
src/string/immutable/exact_size_matcher.zig
|
||||
src/string/immutable/grapheme.zig
|
||||
src/string/immutable/paths.zig
|
||||
src/string/immutable/unicode.zig
|
||||
src/string/immutable/visible.zig
|
||||
src/string/MutableString.zig
|
||||
src/string/paths.zig
|
||||
src/string/PathString.zig
|
||||
src/string/SmolStr.zig
|
||||
src/string/StringBuilder.zig
|
||||
src/string/StringJoiner.zig
|
||||
src/string/unicode.zig
|
||||
src/string/visible.zig
|
||||
src/string/WTFStringImpl.zig
|
||||
src/sync.zig
|
||||
src/sys_uv.zig
|
||||
src/sys.zig
|
||||
src/system_timer.zig
|
||||
src/test/fixtures.zig
|
||||
src/test/recover.zig
|
||||
src/threading.zig
|
||||
src/threading/channel.zig
|
||||
src/threading/Condition.zig
|
||||
src/threading/Futex.zig
|
||||
src/threading/guarded_value.zig
|
||||
src/threading/Mutex.zig
|
||||
src/threading/ThreadPool.zig
|
||||
src/threading/unbounded_queue.zig
|
||||
src/threading/WaitGroup.zig
|
||||
src/thread_pool.zig
|
||||
src/tmp.zig
|
||||
src/toml/toml_lexer.zig
|
||||
src/toml/toml_parser.zig
|
||||
src/tracy.zig
|
||||
src/trait.zig
|
||||
src/transpiler.zig
|
||||
|
||||
@@ -255,10 +255,6 @@ set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)
|
||||
|
||||
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)
|
||||
|
||||
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
|
||||
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
|
||||
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
|
||||
|
||||
set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.h
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.cpp
|
||||
@@ -312,27 +308,6 @@ set(BUN_JAVASCRIPT_OUTPUTS
|
||||
${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
)
|
||||
|
||||
set(BUN_CPP_OUTPUTS
|
||||
${CODEGEN_PATH}/cpp.zig
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-cppbind
|
||||
COMMENT
|
||||
"Generating C++ --> Zig bindings"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
${CWD}/src/codegen/cppbind.ts
|
||||
${CWD}/src
|
||||
${CODEGEN_PATH}
|
||||
SOURCES
|
||||
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
|
||||
${BUN_CXX_SOURCES}
|
||||
OUTPUTS
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-js-modules
|
||||
@@ -562,7 +537,6 @@ set(BUN_ZIG_GENERATED_SOURCES
|
||||
${BUN_ERROR_CODE_OUTPUTS}
|
||||
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
# In debug builds, these are not embedded, but rather referenced at runtime.
|
||||
@@ -632,7 +606,6 @@ register_command(
|
||||
TARGETS
|
||||
clone-zig
|
||||
clone-zstd
|
||||
bun-cppbind
|
||||
SOURCES
|
||||
${BUN_ZIG_SOURCES}
|
||||
${BUN_ZIG_GENERATED_SOURCES}
|
||||
@@ -645,6 +618,10 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
|
||||
|
||||
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
|
||||
|
||||
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
|
||||
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
|
||||
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
|
||||
|
||||
if(WIN32)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
|
||||
@@ -708,7 +685,7 @@ if(WIN32)
|
||||
${CODEGEN_PATH}/windows-app-info.rc
|
||||
@ONLY
|
||||
)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc ${CWD}/src/bun.exe.manifest)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
|
||||
endif()
|
||||
|
||||
# --- Executable ---
|
||||
@@ -981,16 +958,6 @@ if(APPLE)
|
||||
-Wl,-map,${bun}.linker-map
|
||||
)
|
||||
|
||||
if(DEBUG)
|
||||
target_link_options(${bun} PUBLIC
|
||||
# Suppress ALL linker warnings on macOS.
|
||||
# The intent is to only suppress linker alignment warnings.
|
||||
# As of July 21st, 2025 there doesn't seem to be a more specific suppression just for linker alignment warnings.
|
||||
# If you find one, please update this to only be for linker alignment.
|
||||
-Wl,-w
|
||||
)
|
||||
endif()
|
||||
|
||||
# don't strip in debug, this seems to be needed so that the Zig std library
|
||||
# `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
|
||||
# output executable
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
libarchive/libarchive
|
||||
COMMIT
|
||||
7118f97c26bf0b2f426728b482f86508efc81d02
|
||||
898dc8319355b7e985f68a9819f182aaed61b53a
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
@@ -20,14 +20,11 @@ register_cmake_command(
|
||||
-DENABLE_WERROR=OFF
|
||||
-DENABLE_BZip2=OFF
|
||||
-DENABLE_CAT=OFF
|
||||
-DENABLE_CPIO=OFF
|
||||
-DENABLE_UNZIP=OFF
|
||||
-DENABLE_EXPAT=OFF
|
||||
-DENABLE_ICONV=OFF
|
||||
-DENABLE_LIBB2=OFF
|
||||
-DENABLE_LibGCC=OFF
|
||||
-DENABLE_LIBXML2=OFF
|
||||
-DENABLE_WIN32_XMLLITE=OFF
|
||||
-DENABLE_LZ4=OFF
|
||||
-DENABLE_LZMA=OFF
|
||||
-DENABLE_LZO=OFF
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
cloudflare/lol-html
|
||||
COMMIT
|
||||
d64457d9ff0143deef025d5df7e8586092b9afb7
|
||||
67f1d4ffd6b74db7e053fb129dcce620193c180d
|
||||
)
|
||||
|
||||
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 1098cc50652ab1eab171f58f7669e19ca6c276ae)
|
||||
set(WEBKIT_VERSION 9141ee4897bffa8dd020b3ac33fa81d8081d6827)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
|
||||
set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
@@ -2,25 +2,6 @@
|
||||
**Note** — Bun provides a browser- and Node.js-compatible [console](https://developer.mozilla.org/en-US/docs/Web/API/console) global. This page only documents Bun-native APIs.
|
||||
{% /callout %}
|
||||
|
||||
## Object inspection depth
|
||||
|
||||
Bun allows you to configure how deeply nested objects are displayed in `console.log()` output:
|
||||
|
||||
- **CLI flag**: Use `--console-depth <number>` to set the depth for a single run
|
||||
- **Configuration**: Set `console.depth` in your `bunfig.toml` for persistent configuration
|
||||
- **Default**: Objects are inspected to a depth of `2` levels
|
||||
|
||||
```js
|
||||
const nested = { a: { b: { c: { d: "deep" } } } };
|
||||
console.log(nested);
|
||||
// Default (depth 2): { a: { b: [Object] } }
|
||||
// With depth 4: { a: { b: { c: { d: 'deep' } } } }
|
||||
```
|
||||
|
||||
The CLI flag takes precedence over the configuration file setting.
|
||||
|
||||
## Reading from stdin
|
||||
|
||||
In Bun, the `console` object can be used as an `AsyncIterable` to sequentially read lines from `process.stdin`.
|
||||
|
||||
```ts
|
||||
|
||||
@@ -274,23 +274,6 @@ If no connection URL is provided, the system checks for the following individual
|
||||
| `PGPASSWORD` | - | (empty) | Database password |
|
||||
| `PGDATABASE` | - | username | Database name |
|
||||
|
||||
## Runtime Preconnection
|
||||
|
||||
Bun can preconnect to PostgreSQL at startup to improve performance by establishing database connections before your application code runs. This is useful for reducing connection latency on the first database query.
|
||||
|
||||
```bash
|
||||
# Enable PostgreSQL preconnection
|
||||
bun --sql-preconnect index.js
|
||||
|
||||
# Works with DATABASE_URL environment variable
|
||||
DATABASE_URL=postgres://user:pass@localhost:5432/db bun --sql-preconnect index.js
|
||||
|
||||
# Can be combined with other runtime flags
|
||||
bun --sql-preconnect --hot index.js
|
||||
```
|
||||
|
||||
The `--sql-preconnect` flag will automatically establish a PostgreSQL connection using your configured environment variables at startup. If the connection fails, it won't crash your application - the error will be handled gracefully.
|
||||
|
||||
## Connection Options
|
||||
|
||||
You can configure your database connection manually by passing options to the SQL constructor:
|
||||
|
||||
@@ -88,20 +88,6 @@ The order of the `--target` flag does not matter, as long as they're delimited b
|
||||
|
||||
On x64 platforms, Bun uses SIMD optimizations which require a modern CPU supporting AVX2 instructions. The `-baseline` build of Bun is for older CPUs that don't support these optimizations. Normally, when you install Bun we automatically detect which version to use but this can be harder to do when cross-compiling since you might not know the target CPU. You usually don't need to worry about it on Darwin x64, but it is relevant for Windows x64 and Linux x64. If you or your users see `"Illegal instruction"` errors, you might need to use the baseline version.
|
||||
|
||||
## Build-time constants
|
||||
|
||||
Use the `--define` flag to inject build-time constants into your executable, such as version numbers, build timestamps, or configuration values:
|
||||
|
||||
```bash
|
||||
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
These constants are embedded directly into your compiled binary at build time, providing zero runtime overhead and enabling dead code elimination optimizations.
|
||||
|
||||
{% callout type="info" %}
|
||||
For comprehensive examples and advanced patterns, see the [Build-time constants guide](/guides/runtime/build-time-constants).
|
||||
{% /callout %}
|
||||
|
||||
## Deploying to production
|
||||
|
||||
Compiled executables reduce memory usage and improve Bun's start time.
|
||||
|
||||
@@ -183,30 +183,6 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hos
|
||||
}
|
||||
```
|
||||
|
||||
## Installation strategies
|
||||
|
||||
Bun supports two package installation strategies that determine how dependencies are organized in `node_modules`:
|
||||
|
||||
### Hoisted installs (default for single projects)
|
||||
|
||||
The traditional npm/Yarn approach that flattens dependencies into a shared `node_modules` directory:
|
||||
|
||||
```bash
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
### Isolated installs
|
||||
|
||||
A pnpm-like approach that creates strict dependency isolation to prevent phantom dependencies:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
Isolated installs create a central package store in `node_modules/.bun/` with symlinks in the top-level `node_modules`. This ensures packages can only access their declared dependencies.
|
||||
|
||||
For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
## Configuration
|
||||
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
|
||||
@@ -237,15 +213,11 @@ dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
|
||||
# installation strategy: "hoisted" or "isolated"
|
||||
# default: "hoisted"
|
||||
linker = "hoisted"
|
||||
```
|
||||
|
||||
## CI/CD
|
||||
|
||||
Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline:
|
||||
Looking to speed up your CI? Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline.
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
@@ -264,31 +236,4 @@ jobs:
|
||||
run: bun run build
|
||||
```
|
||||
|
||||
For CI/CD environments that want to enforce reproducible builds, use `bun ci` to fail the build if the package.json is out of sync with the lockfile:
|
||||
|
||||
```bash
|
||||
$ bun ci
|
||||
```
|
||||
|
||||
This is equivalent to `bun install --frozen-lockfile`. It installs exact versions from `bun.lock` and fails if `package.json` doesn't match the lockfile. To use `bun ci` or `bun install --frozen-lockfile`, you must commit `bun.lock` to version control.
|
||||
|
||||
And instead of running `bun install`, run `bun ci`.
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
jobs:
|
||||
build:
|
||||
name: build-app
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
- name: Install dependencies
|
||||
run: bun ci
|
||||
- name: Build app
|
||||
run: bun run build
|
||||
```
|
||||
|
||||
{% bunCLIUsage command="install" /%}
|
||||
|
||||
102
docs/cli/pm.md
102
docs/cli/pm.md
@@ -8,70 +8,15 @@ To create a tarball of the current workspace:
|
||||
$ bun pm pack
|
||||
```
|
||||
|
||||
This command creates a `.tgz` file containing all files that would be published to npm, following the same rules as `npm pack`.
|
||||
Options for the `pack` command:
|
||||
|
||||
## Examples
|
||||
|
||||
Basic usage:
|
||||
|
||||
```bash
|
||||
$ bun pm pack
|
||||
# Creates my-package-1.0.0.tgz in current directory
|
||||
```
|
||||
|
||||
Quiet mode for scripting:
|
||||
|
||||
```bash
|
||||
$ TARBALL=$(bun pm pack --quiet)
|
||||
$ echo "Created: $TARBALL"
|
||||
# Output: Created: my-package-1.0.0.tgz
|
||||
```
|
||||
|
||||
Custom destination:
|
||||
|
||||
```bash
|
||||
$ bun pm pack --destination ./dist
|
||||
# Saves tarball in ./dist/ directory
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
- `--dry-run`: Perform all tasks except writing the tarball to disk. Shows what would be included.
|
||||
- `--destination <dir>`: Specify the directory where the tarball will be saved.
|
||||
- `--filename <name>`: Specify an exact file name for the tarball to be saved at.
|
||||
- `--dry-run`: Perform all tasks except writing the tarball to disk.
|
||||
- `--destination`: Specify the directory where the tarball will be saved.
|
||||
- `--filename`: Specify an exact file name for the tarball to be saved at.
|
||||
- `--ignore-scripts`: Skip running pre/postpack and prepare scripts.
|
||||
- `--gzip-level <0-9>`: Set a custom compression level for gzip, ranging from 0 to 9 (default is 9).
|
||||
- `--quiet`: Only output the tarball filename, suppressing verbose output. Ideal for scripts and automation.
|
||||
- `--gzip-level`: Set a custom compression level for gzip, ranging from 0 to 9 (default is 9).
|
||||
|
||||
> **Note:** `--filename` and `--destination` cannot be used at the same time.
|
||||
|
||||
## Output Modes
|
||||
|
||||
**Default output:**
|
||||
|
||||
```bash
|
||||
$ bun pm pack
|
||||
bun pack v1.2.19
|
||||
|
||||
packed 131B package.json
|
||||
packed 40B index.js
|
||||
|
||||
my-package-1.0.0.tgz
|
||||
|
||||
Total files: 2
|
||||
Shasum: f2451d6eb1e818f500a791d9aace80b394258a90
|
||||
Unpacked size: 171B
|
||||
Packed size: 249B
|
||||
```
|
||||
|
||||
**Quiet output:**
|
||||
|
||||
```bash
|
||||
$ bun pm pack --quiet
|
||||
my-package-1.0.0.tgz
|
||||
```
|
||||
|
||||
The `--quiet` flag is particularly useful for automation workflows where you need to capture the generated tarball filename for further processing.
|
||||
> Note `--filename` and `--destination` cannot be used at the same time
|
||||
|
||||
## bin
|
||||
|
||||
@@ -248,38 +193,3 @@ v1.0.1
|
||||
```
|
||||
|
||||
Supports `patch`, `minor`, `major`, `premajor`, `preminor`, `prepatch`, `prerelease`, `from-git`, or specific versions like `1.2.3`. By default creates git commit and tag unless `--no-git-tag-version` was used to skip.
|
||||
|
||||
## pkg
|
||||
|
||||
Manage `package.json` data with get, set, delete, and fix operations.
|
||||
|
||||
All commands support dot and bracket notation:
|
||||
|
||||
```bash
|
||||
scripts.build # dot notation
|
||||
contributors[0] # array access
|
||||
workspaces.0 # dot with numeric index
|
||||
scripts[test:watch] # bracket for special chars
|
||||
```
|
||||
|
||||
Examples:
|
||||
|
||||
```bash
|
||||
# set
|
||||
$ bun pm pkg get name # single property
|
||||
$ bun pm pkg get name version # multiple properties
|
||||
$ bun pm pkg get # entire package.json
|
||||
$ bun pm pkg get scripts.build # nested property
|
||||
|
||||
# set
|
||||
$ bun pm pkg set name="my-package" # simple property
|
||||
$ bun pm pkg set scripts.test="jest" version=2.0.0 # multiple properties
|
||||
$ bun pm pkg set {"private":"true"} --json # JSON values with --json flag
|
||||
|
||||
# delete
|
||||
$ bun pm pkg delete description # single property
|
||||
$ bun pm pkg delete scripts.test contributors[0] # multiple/nested
|
||||
|
||||
# fix
|
||||
$ bun pm pkg fix # auto-fix common issues
|
||||
```
|
||||
|
||||
@@ -185,23 +185,6 @@ This is TypeScript!
|
||||
|
||||
For convenience, all code is treated as TypeScript with JSX support when using `bun run -`.
|
||||
|
||||
## `bun run --console-depth`
|
||||
|
||||
Control the depth of object inspection in console output with the `--console-depth` flag.
|
||||
|
||||
```bash
|
||||
$ bun --console-depth 5 run index.tsx
|
||||
```
|
||||
|
||||
This sets how deeply nested objects are displayed in `console.log()` output. The default depth is `2`. Higher values show more nested properties but may produce verbose output for complex objects.
|
||||
|
||||
```js
|
||||
const nested = { a: { b: { c: { d: "deep" } } } };
|
||||
console.log(nested);
|
||||
// With --console-depth 2 (default): { a: { b: [Object] } }
|
||||
// With --console-depth 4: { a: { b: { c: { d: 'deep' } } } }
|
||||
```
|
||||
|
||||
## `bun run --smol`
|
||||
|
||||
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
|
||||
|
||||
@@ -248,33 +248,4 @@ $ bun test foo
|
||||
|
||||
Any test file in the directory with an _absolute path_ that contains one of the targets will run. Glob patterns are not yet supported. -->
|
||||
|
||||
## AI Agent Integration
|
||||
|
||||
When using Bun's test runner with AI coding assistants, you can enable quieter output to improve readability and reduce context noise. This feature minimizes test output verbosity while preserving essential failure information.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Set any of the following environment variables to enable AI-friendly output:
|
||||
|
||||
- `CLAUDECODE=1` - For Claude Code
|
||||
- `REPL_ID=1` - For Replit
|
||||
- `AGENT=1` - Generic AI agent flag
|
||||
|
||||
### Behavior
|
||||
|
||||
When an AI agent environment is detected:
|
||||
|
||||
- Only test failures are displayed in detail
|
||||
- Passing, skipped, and todo test indicators are hidden
|
||||
- Summary statistics remain intact
|
||||
|
||||
```bash
|
||||
# Example: Enable quiet output for Claude Code
|
||||
$ CLAUDECODE=1 bun test
|
||||
|
||||
# Still shows failures and summary, but hides verbose passing test output
|
||||
```
|
||||
|
||||
This feature is particularly useful in AI-assisted development workflows where reduced output verbosity improves context efficiency while maintaining visibility into test failures.
|
||||
|
||||
{% bunCLIUsage command="test" /%}
|
||||
|
||||
@@ -10,86 +10,6 @@ To update a specific dependency to the latest version:
|
||||
$ bun update [package]
|
||||
```
|
||||
|
||||
## `--interactive`
|
||||
|
||||
For a more controlled update experience, use the `--interactive` flag to select which packages to update:
|
||||
|
||||
```sh
|
||||
$ bun update --interactive
|
||||
$ bun update -i
|
||||
```
|
||||
|
||||
This launches an interactive terminal interface that shows all outdated packages with their current and target versions. You can then select which packages to update.
|
||||
|
||||
### Interactive Interface
|
||||
|
||||
The interface displays packages grouped by dependency type:
|
||||
|
||||
```
|
||||
? Select packages to update - Space to toggle, Enter to confirm, a to select all, n to select none, i to invert, l to toggle latest
|
||||
|
||||
dependencies Current Target Latest
|
||||
□ react 17.0.2 18.2.0 18.3.1
|
||||
□ lodash 4.17.20 4.17.21 4.17.21
|
||||
|
||||
devDependencies Current Target Latest
|
||||
□ typescript 4.8.0 5.0.0 5.3.3
|
||||
□ @types/node 16.11.7 18.0.0 20.11.5
|
||||
|
||||
optionalDependencies Current Target Latest
|
||||
□ some-optional-package 1.0.0 1.1.0 1.2.0
|
||||
```
|
||||
|
||||
**Sections:**
|
||||
|
||||
- Packages are grouped under section headers: `dependencies`, `devDependencies`, `peerDependencies`, `optionalDependencies`
|
||||
- Each section shows column headers aligned with the package data
|
||||
|
||||
**Columns:**
|
||||
|
||||
- **Package**: Package name (may have suffix like ` dev`, ` peer`, ` optional` for clarity)
|
||||
- **Current**: Currently installed version
|
||||
- **Target**: Version that would be installed (respects semver constraints)
|
||||
- **Latest**: Latest available version
|
||||
|
||||
### Keyboard Controls
|
||||
|
||||
**Selection:**
|
||||
|
||||
- **Space**: Toggle package selection
|
||||
- **Enter**: Confirm selections and update
|
||||
- **a/A**: Select all packages
|
||||
- **n/N**: Select none
|
||||
- **i/I**: Invert selection
|
||||
|
||||
**Navigation:**
|
||||
|
||||
- **↑/↓ Arrow keys** or **j/k**: Move cursor
|
||||
- **l/L**: Toggle between target and latest version for current package
|
||||
|
||||
**Exit:**
|
||||
|
||||
- **Ctrl+C** or **Ctrl+D**: Cancel without updating
|
||||
|
||||
### Visual Indicators
|
||||
|
||||
- **☑** Selected packages (will be updated)
|
||||
- **□** Unselected packages
|
||||
- **>** Current cursor position
|
||||
- **Colors**: Red (major), yellow (minor), green (patch) version changes
|
||||
- **Underlined**: Currently selected update target
|
||||
|
||||
### Package Grouping
|
||||
|
||||
Packages are organized in sections by dependency type:
|
||||
|
||||
- **dependencies** - Regular runtime dependencies
|
||||
- **devDependencies** - Development dependencies
|
||||
- **peerDependencies** - Peer dependencies
|
||||
- **optionalDependencies** - Optional dependencies
|
||||
|
||||
Within each section, individual packages may have additional suffixes (` dev`, ` peer`, ` optional`) for extra clarity.
|
||||
|
||||
## `--latest`
|
||||
|
||||
By default, `bun update` will update to the latest version of a dependency that satisfies the version range specified in your `package.json`.
|
||||
@@ -100,8 +20,6 @@ To update to the latest version, regardless of if it's compatible with the curre
|
||||
$ bun update --latest
|
||||
```
|
||||
|
||||
In interactive mode, you can toggle individual packages between their target version (respecting semver) and latest version using the **l** key.
|
||||
|
||||
For example, with the following `package.json`:
|
||||
|
||||
```json
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
The `bun why` command explains why a package is installed in your project by showing the dependency chain that led to its installation.
|
||||
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
$ bun why <package>
|
||||
```
|
||||
|
||||
## Arguments
|
||||
|
||||
- `<package>`: The name of the package to explain. Supports glob patterns like `@org/*` or `*-lodash`.
|
||||
|
||||
## Options
|
||||
|
||||
- `--top`: Show only the top-level dependencies instead of the complete dependency tree.
|
||||
- `--depth <number>`: Maximum depth of the dependency tree to display.
|
||||
|
||||
## Examples
|
||||
|
||||
Check why a specific package is installed:
|
||||
|
||||
```bash
|
||||
$ bun why react
|
||||
react@18.2.0
|
||||
└─ my-app@1.0.0 (requires ^18.0.0)
|
||||
```
|
||||
|
||||
Check why all packages with a specific pattern are installed:
|
||||
|
||||
```bash
|
||||
$ bun why "@types/*"
|
||||
@types/react@18.2.15
|
||||
└─ dev my-app@1.0.0 (requires ^18.0.0)
|
||||
|
||||
@types/react-dom@18.2.7
|
||||
└─ dev my-app@1.0.0 (requires ^18.0.0)
|
||||
```
|
||||
|
||||
Show only top-level dependencies:
|
||||
|
||||
```bash
|
||||
$ bun why express --top
|
||||
express@4.18.2
|
||||
└─ my-app@1.0.0 (requires ^4.18.2)
|
||||
```
|
||||
|
||||
Limit the dependency tree depth:
|
||||
|
||||
```bash
|
||||
$ bun why express --depth 2
|
||||
express@4.18.2
|
||||
└─ express-pollyfill@1.20.1 (requires ^4.18.2)
|
||||
└─ body-parser@1.20.1 (requires ^1.20.1)
|
||||
└─ accepts@1.3.8 (requires ^1.3.8)
|
||||
└─ (deeper dependencies hidden)
|
||||
```
|
||||
|
||||
## Understanding the Output
|
||||
|
||||
The output shows:
|
||||
|
||||
- The package name and version being queried
|
||||
- The dependency chain that led to its installation
|
||||
- The type of dependency (dev, peer, optional, or production)
|
||||
- The version requirement specified in each package's dependencies
|
||||
|
||||
For nested dependencies, the command shows the complete dependency tree by default, with indentation indicating the relationship hierarchy.
|
||||
@@ -40,7 +40,6 @@ Open `prisma/schema.prisma` and add a simple `User` model.
|
||||
```prisma-diff#prisma/schema.prisma
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
output = "../generated/prisma"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
@@ -79,7 +78,7 @@ migrations/
|
||||
|
||||
Your database is now in sync with your schema.
|
||||
|
||||
✔ Generated Prisma Client (v6.11.1) to ./generated/prisma in 41ms
|
||||
✔ Generated Prisma Client (v5.3.1) to ./node_modules/@prisma/client in 41ms
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -1,293 +0,0 @@
|
||||
---
|
||||
name: Build-time constants with --define
|
||||
---
|
||||
|
||||
The `--define` flag can be used with `bun build` and `bun build --compile` to inject build-time constants into your application. This is especially useful for embedding metadata like build versions, timestamps, or configuration flags directly into your compiled executables.
|
||||
|
||||
```sh
|
||||
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/index.ts --outfile myapp
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Why use build-time constants?
|
||||
|
||||
Build-time constants are embedded directly into your compiled code, making them:
|
||||
|
||||
- **Zero runtime overhead** - No environment variable lookups or file reads
|
||||
- **Immutable** - Values are baked into the binary at compile time
|
||||
- **Optimizable** - Dead code elimination can remove unused branches
|
||||
- **Secure** - No external dependencies or configuration files to manage
|
||||
|
||||
This is similar to `gcc -D` or `#define` in C/C++, but for JavaScript/TypeScript.
|
||||
|
||||
---
|
||||
|
||||
## Basic usage
|
||||
|
||||
### With `bun build`
|
||||
|
||||
```sh
|
||||
# Bundle with build-time constants
|
||||
$ bun build --define BUILD_VERSION='"1.0.0"' --define NODE_ENV='"production"' src/index.ts --outdir ./dist
|
||||
```
|
||||
|
||||
### With `bun build --compile`
|
||||
|
||||
```sh
|
||||
# Compile to executable with build-time constants
|
||||
$ bun build --compile --define BUILD_VERSION='"1.0.0"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
### JavaScript API
|
||||
|
||||
```ts
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
outdir: "./dist",
|
||||
define: {
|
||||
BUILD_VERSION: '"1.0.0"',
|
||||
BUILD_TIME: '"2024-01-15T10:30:00Z"',
|
||||
DEBUG: "false",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common use cases
|
||||
|
||||
### Version information
|
||||
|
||||
Embed version and build metadata directly into your executable:
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#src/version.ts
|
||||
// These constants are replaced at build time
|
||||
declare const BUILD_VERSION: string;
|
||||
declare const BUILD_TIME: string;
|
||||
declare const GIT_COMMIT: string;
|
||||
|
||||
export function getVersion() {
|
||||
return {
|
||||
version: BUILD_VERSION,
|
||||
buildTime: BUILD_TIME,
|
||||
commit: GIT_COMMIT,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
```sh#Build command
|
||||
$ bun build --compile \
|
||||
--define BUILD_VERSION='"1.2.3"' \
|
||||
--define BUILD_TIME='"2024-01-15T10:30:00Z"' \
|
||||
--define GIT_COMMIT='"abc123"' \
|
||||
src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Feature flags
|
||||
|
||||
Use build-time constants to enable/disable features:
|
||||
|
||||
```ts
|
||||
// Replaced at build time
|
||||
declare const ENABLE_ANALYTICS: boolean;
|
||||
declare const ENABLE_DEBUG: boolean;
|
||||
|
||||
function trackEvent(event: string) {
|
||||
if (ENABLE_ANALYTICS) {
|
||||
// This entire block is removed if ENABLE_ANALYTICS is false
|
||||
console.log("Tracking:", event);
|
||||
}
|
||||
}
|
||||
|
||||
if (ENABLE_DEBUG) {
|
||||
console.log("Debug mode enabled");
|
||||
}
|
||||
```
|
||||
|
||||
```sh
|
||||
# Production build - analytics enabled, debug disabled
|
||||
$ bun build --compile --define ENABLE_ANALYTICS=true --define ENABLE_DEBUG=false src/app.ts --outfile app-prod
|
||||
|
||||
# Development build - both enabled
|
||||
$ bun build --compile --define ENABLE_ANALYTICS=false --define ENABLE_DEBUG=true src/app.ts --outfile app-dev
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
Replace configuration objects at build time:
|
||||
|
||||
```ts
|
||||
declare const CONFIG: {
|
||||
apiUrl: string;
|
||||
timeout: number;
|
||||
retries: number;
|
||||
};
|
||||
|
||||
// CONFIG is replaced with the actual object at build time
|
||||
const response = await fetch(CONFIG.apiUrl, {
|
||||
timeout: CONFIG.timeout,
|
||||
});
|
||||
```
|
||||
|
||||
```sh
|
||||
$ bun build --compile --define 'CONFIG={"apiUrl":"https://api.example.com","timeout":5000,"retries":3}' src/app.ts --outfile app
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Advanced patterns
|
||||
|
||||
### Environment-specific builds
|
||||
|
||||
Create different executables for different environments:
|
||||
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"build:dev": "bun build --compile --define NODE_ENV='\"development\"' --define API_URL='\"http://localhost:3000\"' src/app.ts --outfile app-dev",
|
||||
"build:staging": "bun build --compile --define NODE_ENV='\"staging\"' --define API_URL='\"https://staging.example.com\"' src/app.ts --outfile app-staging",
|
||||
"build:prod": "bun build --compile --define NODE_ENV='\"production\"' --define API_URL='\"https://api.example.com\"' src/app.ts --outfile app-prod"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Using shell commands for dynamic values
|
||||
|
||||
Generate build-time constants from shell commands:
|
||||
|
||||
```sh
|
||||
# Use git to get current commit and timestamp
|
||||
$ bun build --compile \
|
||||
--define BUILD_VERSION="\"$(git describe --tags --always)\"" \
|
||||
--define BUILD_TIME="\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"" \
|
||||
--define GIT_COMMIT="\"$(git rev-parse HEAD)\"" \
|
||||
src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
### Build automation script
|
||||
|
||||
Create a build script that automatically injects build metadata:
|
||||
|
||||
```ts
|
||||
// build.ts
|
||||
import { $ } from "bun";
|
||||
|
||||
const version = await $`git describe --tags --always`.text();
|
||||
const buildTime = new Date().toISOString();
|
||||
const gitCommit = await $`git rev-parse HEAD`.text();
|
||||
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/cli.ts"],
|
||||
outdir: "./dist",
|
||||
define: {
|
||||
BUILD_VERSION: JSON.stringify(version.trim()),
|
||||
BUILD_TIME: JSON.stringify(buildTime),
|
||||
GIT_COMMIT: JSON.stringify(gitCommit.trim()),
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Built with version ${version.trim()}`);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Important considerations
|
||||
|
||||
### Value format
|
||||
|
||||
Values must be valid JSON that will be parsed and inlined as JavaScript expressions:
|
||||
|
||||
```sh
|
||||
# ✅ Strings must be JSON-quoted
|
||||
--define VERSION='"1.0.0"'
|
||||
|
||||
# ✅ Numbers are JSON literals
|
||||
--define PORT=3000
|
||||
|
||||
# ✅ Booleans are JSON literals
|
||||
--define DEBUG=true
|
||||
|
||||
# ✅ Objects and arrays (use single quotes to wrap the JSON)
|
||||
--define 'CONFIG={"host":"localhost","port":3000}'
|
||||
|
||||
# ✅ Arrays work too
|
||||
--define 'FEATURES=["auth","billing","analytics"]'
|
||||
|
||||
# ❌ This won't work - missing quotes around string
|
||||
--define VERSION=1.0.0
|
||||
```
|
||||
|
||||
### Property keys
|
||||
|
||||
You can use property access patterns as keys, not just simple identifiers:
|
||||
|
||||
```sh
|
||||
# ✅ Replace process.env.NODE_ENV with "production"
|
||||
--define 'process.env.NODE_ENV="production"'
|
||||
|
||||
# ✅ Replace process.env.API_KEY with the actual key
|
||||
--define 'process.env.API_KEY="abc123"'
|
||||
|
||||
# ✅ Replace nested properties
|
||||
--define 'window.myApp.version="1.0.0"'
|
||||
|
||||
# ✅ Replace array access
|
||||
--define 'process.argv[2]="--production"'
|
||||
```
|
||||
|
||||
This is particularly useful for environment variables:
|
||||
|
||||
```ts
|
||||
// Before compilation
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
console.log("Production mode");
|
||||
}
|
||||
|
||||
// After compilation with --define 'process.env.NODE_ENV="production"'
|
||||
if ("production" === "production") {
|
||||
console.log("Production mode");
|
||||
}
|
||||
|
||||
// After optimization
|
||||
console.log("Production mode");
|
||||
```
|
||||
|
||||
### TypeScript declarations
|
||||
|
||||
For TypeScript projects, declare your constants to avoid type errors:
|
||||
|
||||
```ts
|
||||
// types/build-constants.d.ts
|
||||
declare const BUILD_VERSION: string;
|
||||
declare const BUILD_TIME: string;
|
||||
declare const NODE_ENV: "development" | "staging" | "production";
|
||||
declare const DEBUG: boolean;
|
||||
```
|
||||
|
||||
### Cross-platform compatibility
|
||||
|
||||
When building for multiple platforms, constants work the same way:
|
||||
|
||||
```sh
|
||||
# Linux
|
||||
$ bun build --compile --target=bun-linux-x64 --define PLATFORM='"linux"' src/app.ts --outfile app-linux
|
||||
|
||||
# macOS
|
||||
$ bun build --compile --target=bun-darwin-x64 --define PLATFORM='"darwin"' src/app.ts --outfile app-macos
|
||||
|
||||
# Windows
|
||||
$ bun build --compile --target=bun-windows-x64 --define PLATFORM='"windows"' src/app.ts --outfile app-windows.exe
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related
|
||||
|
||||
- [Define constants at runtime](/guides/runtime/define-constant) - Using `--define` with `bun run`
|
||||
- [Building executables](/bundler/executables) - Complete guide to `bun build --compile`
|
||||
- [Bundler API](/bundler) - Full bundler documentation including `define` option
|
||||
@@ -52,8 +52,6 @@ In your root-level `package.json`, add a `catalog` or `catalogs` field within th
|
||||
}
|
||||
```
|
||||
|
||||
If you put `catalog` or `catalogs` at the top level of the `package.json` file, that will work too.
|
||||
|
||||
### 2. Reference Catalog Versions in Workspace Packages
|
||||
|
||||
In your workspace packages, use the `catalog:` protocol to reference versions:
|
||||
|
||||
@@ -81,14 +81,6 @@ $ bun install --verbose # debug logging
|
||||
$ bun install --silent # no logging
|
||||
```
|
||||
|
||||
To use isolated installs instead of the default hoisted strategy:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
Isolated installs create strict dependency isolation similar to pnpm, preventing phantom dependencies and ensuring more deterministic builds. For complete documentation, see [Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
{% details summary="Configuring behavior" %}
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`:
|
||||
|
||||
@@ -118,10 +110,6 @@ dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
|
||||
# installation strategy: "hoisted" or "isolated"
|
||||
# default: "hoisted"
|
||||
linker = "hoisted"
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
@@ -1,195 +0,0 @@
|
||||
Bun provides an alternative package installation strategy called **isolated installs** that creates strict dependency isolation similar to pnpm's approach. This mode prevents phantom dependencies and ensures reproducible, deterministic builds.
|
||||
|
||||
## What are isolated installs?
|
||||
|
||||
Isolated installs create a non-hoisted dependency structure where packages can only access their explicitly declared dependencies. This differs from the traditional "hoisted" installation strategy used by npm and Yarn, where dependencies are flattened into a shared `node_modules` directory.
|
||||
|
||||
### Key benefits
|
||||
|
||||
- **Prevents phantom dependencies** — Packages cannot accidentally import dependencies they haven't declared
|
||||
- **Deterministic resolution** — Same dependency tree regardless of what else is installed
|
||||
- **Better for monorepos** — Workspace isolation prevents cross-contamination between packages
|
||||
- **Reproducible builds** — More predictable resolution behavior across environments
|
||||
|
||||
## Using isolated installs
|
||||
|
||||
### Command line
|
||||
|
||||
Use the `--linker` flag to specify the installation strategy:
|
||||
|
||||
```bash
|
||||
# Use isolated installs
|
||||
$ bun install --linker isolated
|
||||
|
||||
# Use traditional hoisted installs
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
### Configuration file
|
||||
|
||||
Set the default linker strategy in your `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install]
|
||||
linker = "isolated"
|
||||
```
|
||||
|
||||
### Default behavior
|
||||
|
||||
By default, Bun uses the **hoisted** installation strategy for all projects. To use isolated installs, you must explicitly specify the `--linker isolated` flag or set it in your configuration file.
|
||||
|
||||
## How isolated installs work
|
||||
|
||||
### Directory structure
|
||||
|
||||
Instead of hoisting dependencies, isolated installs create a two-tier structure:
|
||||
|
||||
```
|
||||
node_modules/
|
||||
├── .bun/ # Central package store
|
||||
│ ├── package@1.0.0/ # Versioned package installations
|
||||
│ │ └── node_modules/
|
||||
│ │ └── package/ # Actual package files
|
||||
│ ├── @scope+package@2.1.0/ # Scoped packages (+ replaces /)
|
||||
│ │ └── node_modules/
|
||||
│ │ └── @scope/
|
||||
│ │ └── package/
|
||||
│ └── ...
|
||||
└── package-name -> .bun/package@1.0.0/node_modules/package # Symlinks
|
||||
```
|
||||
|
||||
### Resolution algorithm
|
||||
|
||||
1. **Central store** — All packages are installed in `node_modules/.bun/package@version/` directories
|
||||
2. **Symlinks** — Top-level `node_modules` contains symlinks pointing to the central store
|
||||
3. **Peer resolution** — Complex peer dependencies create specialized directory names
|
||||
4. **Deduplication** — Packages with identical package IDs and peer dependency sets are shared
|
||||
|
||||
### Workspace handling
|
||||
|
||||
In monorepos, workspace dependencies are handled specially:
|
||||
|
||||
- **Workspace packages** — Symlinked directly to their source directories, not the store
|
||||
- **Workspace dependencies** — Can access other workspace packages in the monorepo
|
||||
- **External dependencies** — Installed in the isolated store with proper isolation
|
||||
|
||||
## Comparison with hoisted installs
|
||||
|
||||
| Aspect | Hoisted (npm/Yarn) | Isolated (pnpm-like) |
|
||||
| ------------------------- | ------------------------------------------ | --------------------------------------- |
|
||||
| **Dependency access** | Packages can access any hoisted dependency | Packages only see declared dependencies |
|
||||
| **Phantom dependencies** | ❌ Possible | ✅ Prevented |
|
||||
| **Disk usage** | ✅ Lower (shared installs) | ✅ Similar (uses symlinks) |
|
||||
| **Determinism** | ❌ Less deterministic | ✅ More deterministic |
|
||||
| **Node.js compatibility** | ✅ Standard behavior | ✅ Compatible via symlinks |
|
||||
| **Best for** | Single projects, legacy code | Monorepos, strict dependency management |
|
||||
|
||||
## Advanced features
|
||||
|
||||
### Peer dependency handling
|
||||
|
||||
Isolated installs handle peer dependencies through sophisticated resolution:
|
||||
|
||||
```bash
|
||||
# Package with peer dependencies creates specialized paths
|
||||
node_modules/.bun/package@1.0.0_react@18.2.0/
|
||||
```
|
||||
|
||||
The directory name encodes both the package version and its peer dependency versions, ensuring each unique combination gets its own installation.
|
||||
|
||||
### Backend strategies
|
||||
|
||||
Bun uses different file operation strategies for performance:
|
||||
|
||||
- **Clonefile** (macOS) — Copy-on-write filesystem clones for maximum efficiency
|
||||
- **Hardlink** (Linux/Windows) — Hardlinks to save disk space
|
||||
- **Copyfile** (fallback) — Full file copies when other methods aren't available
|
||||
|
||||
### Debugging isolated installs
|
||||
|
||||
Enable verbose logging to understand the installation process:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated --verbose
|
||||
```
|
||||
|
||||
This shows:
|
||||
|
||||
- Store entry creation
|
||||
- Symlink operations
|
||||
- Peer dependency resolution
|
||||
- Deduplication decisions
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Compatibility issues
|
||||
|
||||
Some packages may not work correctly with isolated installs due to:
|
||||
|
||||
- **Hardcoded paths** — Packages that assume a flat `node_modules` structure
|
||||
- **Dynamic imports** — Runtime imports that don't follow Node.js resolution
|
||||
- **Build tools** — Tools that scan `node_modules` directly
|
||||
|
||||
If you encounter issues, you can:
|
||||
|
||||
1. **Switch to hoisted mode** for specific projects:
|
||||
|
||||
```bash
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
2. **Report compatibility issues** to help improve isolated install support
|
||||
|
||||
### Performance considerations
|
||||
|
||||
- **Install time** — May be slightly slower due to symlink operations
|
||||
- **Disk usage** — Similar to hoisted (uses symlinks, not file copies)
|
||||
- **Memory usage** — Higher during install due to complex peer resolution
|
||||
|
||||
## Migration guide
|
||||
|
||||
### From npm/Yarn
|
||||
|
||||
```bash
|
||||
# Remove existing node_modules and lockfiles
|
||||
$ rm -rf node_modules package-lock.json yarn.lock
|
||||
|
||||
# Install with isolated linker
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
### From pnpm
|
||||
|
||||
Isolated installs are conceptually similar to pnpm, so migration should be straightforward:
|
||||
|
||||
```bash
|
||||
# Remove pnpm files
|
||||
$ rm -rf node_modules pnpm-lock.yaml
|
||||
|
||||
# Install with Bun's isolated linker
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
The main difference is that Bun uses symlinks in `node_modules` while pnpm uses a global store with symlinks.
|
||||
|
||||
## When to use isolated installs
|
||||
|
||||
**Use isolated installs when:**
|
||||
|
||||
- Working in monorepos with multiple packages
|
||||
- Strict dependency management is required
|
||||
- Preventing phantom dependencies is important
|
||||
- Building libraries that need deterministic dependencies
|
||||
|
||||
**Use hoisted installs when:**
|
||||
|
||||
- Working with legacy code that assumes flat `node_modules`
|
||||
- Compatibility with existing build tools is required
|
||||
- Working in environments where symlinks aren't well supported
|
||||
- You prefer the simpler traditional npm behavior
|
||||
|
||||
## Related documentation
|
||||
|
||||
- [Package manager > Workspaces](https://bun.com/docs/install/workspaces) — Monorepo workspace management
|
||||
- [Package manager > Lockfile](https://bun.com/docs/install/lockfile) — Understanding Bun's lockfile format
|
||||
- [CLI > install](https://bun.com/docs/cli/install) — Complete `bun install` command reference
|
||||
@@ -176,16 +176,10 @@ export default {
|
||||
page("cli/pm", "`bun pm`", {
|
||||
description: "Utilities relating to package management with Bun.",
|
||||
}),
|
||||
page("cli/why", "`bun why`", {
|
||||
description: "Explains why a package is installed in your project.",
|
||||
}),
|
||||
page("install/cache", "Global cache", {
|
||||
description:
|
||||
"Bun's package manager installs all packages into a shared global cache to avoid redundant re-downloads.",
|
||||
}),
|
||||
page("install/isolated", "Isolated installs", {
|
||||
description: "Create strict dependency isolation, preventing phantom dependencies.",
|
||||
}),
|
||||
page("install/workspaces", "Workspaces", {
|
||||
description: "Bun's package manager supports workspaces and monorepo development workflows.",
|
||||
}),
|
||||
|
||||
@@ -20,7 +20,7 @@ this one:
|
||||
Given a file implementing a simple function, such as `add`
|
||||
|
||||
```zig#src/bun.js/math.zig
|
||||
pub fn add(global: *jsc.JSGlobalObject, a: i32, b: i32) !i32 {
|
||||
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
|
||||
return std.math.add(i32, a, b) catch {
|
||||
// Binding functions can return `error.OutOfMemory` and `error.JSError`.
|
||||
// Others like `error.Overflow` from `std.math.add` must be converted.
|
||||
@@ -33,7 +33,7 @@ const gen = bun.gen.math; // "math" being this file's basename
|
||||
|
||||
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const jsc = bun.jsc;
|
||||
const JSC = bun.JSC;
|
||||
```
|
||||
|
||||
Then describe the API schema using a `.bind.ts` function. The binding file goes next to the Zig file.
|
||||
|
||||
@@ -136,7 +136,7 @@ You should add this to `$Env:PATH`. The simplest way to do so is to open the sta
|
||||
|
||||
## Tests
|
||||
|
||||
You can run the test suite either using `bun test <path>` or by using the wrapper script `bun node:test <path>`. The `bun node:test` command runs every test file in a separate instance of bun.exe, to prevent a crash in the test runner from stopping the entire suite.
|
||||
You can run the test suite either using `bun test <path>`, or by using the wrapper script `packages\bun-internal-test`. The internal test package is a wrapper cli to run every test file in a separate instance of bun.exe, to prevent a crash in the test runner from stopping the entire suite.
|
||||
|
||||
```ps1
|
||||
# Setup
|
||||
|
||||
@@ -108,21 +108,6 @@ The `telemetry` field permit to enable/disable the analytics records. Bun record
|
||||
telemetry = false
|
||||
```
|
||||
|
||||
### `console`
|
||||
|
||||
Configure console output behavior.
|
||||
|
||||
#### `console.depth`
|
||||
|
||||
Set the default depth for `console.log()` object inspection. Default `2`.
|
||||
|
||||
```toml
|
||||
[console]
|
||||
depth = 3
|
||||
```
|
||||
|
||||
This controls how deeply nested objects are displayed in console output. Higher values show more nested properties but may produce verbose output for complex objects. This setting can be overridden by the `--console-depth` CLI flag.
|
||||
|
||||
## Test runner
|
||||
|
||||
The test runner is configured under the `[test]` section of your bunfig.toml.
|
||||
@@ -195,24 +180,6 @@ Whether to skip test files when computing coverage statistics. Default `false`.
|
||||
coverageSkipTestFiles = false
|
||||
```
|
||||
|
||||
### `test.coveragePathIgnorePatterns`
|
||||
|
||||
Exclude specific files or file patterns from coverage reports using glob patterns. Can be a single string pattern or an array of patterns.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
# Single pattern
|
||||
coveragePathIgnorePatterns = "**/*.spec.ts"
|
||||
|
||||
# Multiple patterns
|
||||
coveragePathIgnorePatterns = [
|
||||
"**/*.spec.ts",
|
||||
"**/*.test.ts",
|
||||
"src/utils/**",
|
||||
"*.config.js"
|
||||
]
|
||||
```
|
||||
|
||||
### `test.coverageReporter`
|
||||
|
||||
By default, coverage reports will be printed to the console. For persistent code coverage reports in CI environments and for other tools use `lcov`.
|
||||
|
||||
@@ -71,26 +71,6 @@ coverageThreshold = { lines = 0.9, functions = 0.8, statements = 0.85 }
|
||||
|
||||
Setting any of these enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.
|
||||
|
||||
#### coveragePathIgnorePatterns
|
||||
|
||||
Exclude specific files or file patterns from coverage reports using glob patterns:
|
||||
|
||||
```toml
|
||||
[test]
|
||||
# Single pattern
|
||||
coveragePathIgnorePatterns = "**/*.spec.ts"
|
||||
|
||||
# Multiple patterns
|
||||
coveragePathIgnorePatterns = [
|
||||
"**/*.spec.ts",
|
||||
"**/*.test.ts",
|
||||
"src/utils/**",
|
||||
"*.config.js"
|
||||
]
|
||||
```
|
||||
|
||||
Files matching any of these patterns will be excluded from coverage calculation and reporting. See the [coverage documentation](./coverage.md) for more details and examples.
|
||||
|
||||
#### coverageIgnoreSourcemaps
|
||||
|
||||
Internally, Bun transpiles every file. That means code coverage must also go through sourcemaps before they can be reported. We expose this as a flag to allow you to opt out of this behavior, but it will be confusing because during the transpilation process, Bun may move code around and change variable names. This option is mostly useful for debugging coverage issues.
|
||||
|
||||
@@ -57,18 +57,7 @@ coverageThreshold = { lines = 0.9, functions = 0.9, statements = 0.9 }
|
||||
|
||||
Setting any of these thresholds enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.
|
||||
|
||||
### Sourcemaps
|
||||
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
coverageIgnoreSourcemaps = true # default false
|
||||
```
|
||||
|
||||
### Exclude files from coverage
|
||||
|
||||
#### Skip test files
|
||||
### Exclude test files from coverage
|
||||
|
||||
By default, test files themselves are included in coverage reports. You can exclude them with:
|
||||
|
||||
@@ -79,33 +68,15 @@ coverageSkipTestFiles = true # default false
|
||||
|
||||
This will exclude files matching test patterns (e.g., _.test.ts, _\_spec.js) from the coverage report.
|
||||
|
||||
#### Ignore specific paths and patterns
|
||||
### Sourcemaps
|
||||
|
||||
You can exclude specific files or file patterns from coverage reports using `coveragePathIgnorePatterns`:
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
# Single pattern
|
||||
coveragePathIgnorePatterns = "**/*.spec.ts"
|
||||
|
||||
# Multiple patterns
|
||||
coveragePathIgnorePatterns = [
|
||||
"**/*.spec.ts",
|
||||
"**/*.test.ts",
|
||||
"src/utils/**",
|
||||
"*.config.js"
|
||||
]
|
||||
coverageIgnoreSourcemaps = true # default false
|
||||
```
|
||||
|
||||
This option accepts glob patterns and works similarly to Jest's `collectCoverageFrom` ignore patterns. Files matching any of these patterns will be excluded from coverage calculation and reporting in both text and LCOV outputs.
|
||||
|
||||
Common use cases:
|
||||
|
||||
- Exclude utility files: `"src/utils/**"`
|
||||
- Exclude configuration files: `"*.config.js"`
|
||||
- Exclude specific test patterns: `"**/*.spec.ts"`
|
||||
- Exclude build artifacts: `"dist/**"`
|
||||
|
||||
### Coverage defaults
|
||||
|
||||
By default, coverage reports:
|
||||
@@ -113,7 +84,6 @@ By default, coverage reports:
|
||||
1. Exclude `node_modules` directories
|
||||
2. Exclude files loaded via non-JS/TS loaders (e.g., .css, .txt) unless a custom JS loader is specified
|
||||
3. Include test files themselves (can be disabled with `coverageSkipTestFiles = true` as shown above)
|
||||
4. Can exclude additional files with `coveragePathIgnorePatterns` as shown above
|
||||
|
||||
### Coverage reporters
|
||||
|
||||
|
||||
38
instructions.md
Normal file
38
instructions.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Fixing CSS modules in Bun's dev server
|
||||
|
||||
Look inside the reproduction folder: /Users/zackradisic/Code/bun-repro-18258/
|
||||
|
||||
When importing a CSS module, it is not being resolved correctly and the following error is thrown:
|
||||
|
||||
```
|
||||
frontend ReferenceError: import_Ooga_module is not defined
|
||||
at App (/Users/zackradisic/Code/bun-repro-18258/src/App.tsx:5:21)
|
||||
at react-stack-bottom-frame (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:23863:20)
|
||||
at renderWithHooks (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:5529:22)
|
||||
at updateFunctionComponent (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:8897:19)
|
||||
at beginWork (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:10522:18)
|
||||
at runWithFiberInDEV (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:1519:30)
|
||||
at performUnitOfWork (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:15132:22)
|
||||
at workLoopSync (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:14956:41)
|
||||
at renderRootSync (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:14936:11)
|
||||
at performWorkOnRoot (/Users/zackradisic/Code/bun-repro-18258/node_modules/react-dom/cjs/react-dom-client.development.js:14462:44)
|
||||
```
|
||||
|
||||
Some notes on how CSS modules work:
|
||||
|
||||
- We generate a JS object where the keys are the class names and the values are the class names made unique. This starts on line 226 in `generateCodeForLazyExport.zig`. We do this using `E.NameOfSymbol` which uses a `Ref` because we don't know the final generated name until after all CSS modules are parsed.
|
||||
- The `mangleLocalCss` (inside `LinkerContext.zig`) is called after all CSS modules are parsed. It is used to mangle the class names in the CSS modules so that they are unique.
|
||||
|
||||
Some notes about how Bun's dev server is different and is what likely causing the issue:
|
||||
|
||||
- The dev server skips stuff and doesn't call all the same linker functions
|
||||
- This is causing `generateCodeForLazyExport` to _not_ be called and thus the JS object is not generated for importing CSS modules
|
||||
- We will have to create our own routine which will do this
|
||||
- One key place to look at is `finalizeBundle(...)` in `DevServer.zig`, that's when the bundle is finalized.
|
||||
|
||||
You instructions:
|
||||
|
||||
1. Split out the existing code in `generateCodeForLazyExport` which handles CSS into a new function which can be called separately
|
||||
2. Call this new function in `finalizeBundle(...)` in `DevServer.zig`
|
||||
|
||||
**IMPORTANT**: If you need ANY help from me (for example running bun or something), please ask me by executing the command `say bun2 help`.
|
||||
@@ -2,15 +2,16 @@ const std = @import("std");
|
||||
|
||||
const path_handler = @import("../src/resolver/resolve_path.zig");
|
||||
const bun = @import("bun");
|
||||
const string = []const u8;
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const MutableString = bun.MutableString;
|
||||
const stringZ = [:0]const u8;
|
||||
const stringZ = bun.stringZ;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const Features = bun.analytics.Features;
|
||||
const C = bun.C;
|
||||
const Features = @import("../src/analytics/analytics_thread.zig").Features;
|
||||
|
||||
// zig run --main-pkg-path ../ ./features.zig
|
||||
pub fn main() anyerror!void {
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const string = []const u8;
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const MutableString = bun.MutableString;
|
||||
const stringZ = [:0]const u8;
|
||||
const stringZ = bun.stringZ;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const C = bun.C;
|
||||
const clap = @import("../src/deps/zig-clap/clap.zig");
|
||||
|
||||
const URL = @import("../src/url.zig").URL;
|
||||
@@ -195,7 +196,7 @@ pub fn main() anyerror!void {
|
||||
response_body: MutableString = undefined,
|
||||
context: HTTP.HTTPChannelContext = undefined,
|
||||
};
|
||||
const Batch = bun.ThreadPool.Batch;
|
||||
const Batch = @import("../src/thread_pool.zig").Batch;
|
||||
var groups = try default_allocator.alloc(Group, args.count);
|
||||
var repeat_i: usize = 0;
|
||||
while (repeat_i < args.repeat + 1) : (repeat_i += 1) {
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
// most of this file is copy pasted from other files in misctools
|
||||
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const string = []const u8;
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const MutableString = bun.MutableString;
|
||||
const stringZ = [:0]const u8;
|
||||
const stringZ = bun.stringZ;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const C = bun.C;
|
||||
const clap = @import("../src/deps/zig-clap/clap.zig");
|
||||
|
||||
const URL = @import("../src/url.zig").URL;
|
||||
|
||||
@@ -2,14 +2,15 @@ const std = @import("std");
|
||||
|
||||
const path_handler = @import("../src/resolver/resolve_path.zig");
|
||||
const bun = @import("bun");
|
||||
const string = []const u8;
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const MutableString = bun.MutableString;
|
||||
const stringZ = [:0]const u8;
|
||||
const stringZ = bun.stringZ;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const C = bun.C;
|
||||
|
||||
// zig build-exe -Doptimize=ReleaseFast --main-pkg-path ../ ./readlink-getfd.zig
|
||||
pub fn main() anyerror!void {
|
||||
|
||||
@@ -2,14 +2,15 @@ const std = @import("std");
|
||||
|
||||
const path_handler = @import("../src/resolver/resolve_path.zig");
|
||||
const bun = @import("bun");
|
||||
const string = []const u8;
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const MutableString = bun.MutableString;
|
||||
const stringZ = [:0]const u8;
|
||||
const stringZ = bun.stringZ;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const C = bun.C;
|
||||
|
||||
// zig build-exe -Doptimize=ReleaseFast --main-pkg-path ../ ./readlink-getfd.zig
|
||||
pub fn main() anyerror!void {
|
||||
|
||||
@@ -2,14 +2,15 @@ const std = @import("std");
|
||||
|
||||
const path_handler = @import("../src/resolver/resolve_path.zig");
|
||||
const bun = @import("bun");
|
||||
const string = []const u8;
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const MutableString = bun.MutableString;
|
||||
const stringZ = [:0]const u8;
|
||||
const stringZ = bun.stringZ;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const C = bun.C;
|
||||
|
||||
const Archive = @import("../src/libarchive/libarchive.zig").Archive;
|
||||
const Zlib = @import("../src/zlib.zig");
|
||||
|
||||
13
package.json
13
package.json
@@ -1,14 +1,12 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.2.20",
|
||||
"version": "1.2.19",
|
||||
"workspaces": [
|
||||
"./packages/bun-types",
|
||||
"./packages/@types/bun"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@lezer/common": "^1.2.3",
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
@@ -24,14 +22,15 @@
|
||||
"@types/bun": "workspace:packages/@types/bun"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "bun --silent run build:debug",
|
||||
"build": "bun run build:debug",
|
||||
"ci": "bun scripts/buildkite-failures.ts ",
|
||||
"watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
|
||||
"watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
|
||||
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
|
||||
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
|
||||
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
|
||||
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
|
||||
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",
|
||||
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
|
||||
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
|
||||
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
|
||||
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
|
||||
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
|
||||
"build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert",
|
||||
|
||||
@@ -107,8 +107,6 @@ type InitializeRequest = DAP.InitializeRequest & {
|
||||
supportsConfigurationDoneRequest?: boolean;
|
||||
enableControlFlowProfiler?: boolean;
|
||||
enableDebugger?: boolean;
|
||||
enableTestReporter?: boolean;
|
||||
enableConsole?: boolean | true;
|
||||
} & (
|
||||
| {
|
||||
enableLifecycleAgentReporter?: false;
|
||||
@@ -461,10 +459,7 @@ export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
|
||||
|
||||
this.send("Inspector.enable");
|
||||
this.send("Runtime.enable");
|
||||
|
||||
if (request.enableConsole ?? true) {
|
||||
this.send("Console.enable");
|
||||
}
|
||||
this.send("Console.enable");
|
||||
|
||||
if (request.enableControlFlowProfiler) {
|
||||
this.send("Runtime.enableControlFlowProfiler");
|
||||
@@ -478,10 +473,6 @@ export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
|
||||
}
|
||||
}
|
||||
|
||||
if (request.enableTestReporter) {
|
||||
this.send("TestReporter.enable");
|
||||
}
|
||||
|
||||
// use !== false because by default if unspecified we want to enable the debugger
|
||||
// and this option didn't exist beforehand, so we can't make it non-optional
|
||||
if (request.enableDebugger !== false) {
|
||||
|
||||
@@ -2507,8 +2507,7 @@ export namespace JSC {
|
||||
export type StopTrackingResponse = {};
|
||||
}
|
||||
export namespace TestReporter {
|
||||
export type TestStatus = "pass" | "fail" | "timeout" | "skip" | "todo" | "skipped_because_label";
|
||||
export type TestType = "test" | "describe";
|
||||
export type TestStatus = "pass" | "fail" | "timeout" | "skip" | "todo";
|
||||
/**
|
||||
* undefined
|
||||
* @event `TestReporter.found`
|
||||
@@ -2534,14 +2533,6 @@ export namespace JSC {
|
||||
* Name of the test that started.
|
||||
*/
|
||||
name?: string | undefined;
|
||||
/**
|
||||
* Type of the item found (test or describe block).
|
||||
*/
|
||||
type?: TestType | undefined;
|
||||
/**
|
||||
* ID of the parent describe block, if any.
|
||||
*/
|
||||
parentId?: number | undefined;
|
||||
};
|
||||
/**
|
||||
* undefined
|
||||
|
||||
@@ -3014,12 +3014,7 @@
|
||||
{
|
||||
"id": "TestStatus",
|
||||
"type": "string",
|
||||
"enum": ["pass", "fail", "timeout", "skip", "todo", "skipped_because_label"]
|
||||
},
|
||||
{
|
||||
"id": "TestType",
|
||||
"type": "string",
|
||||
"enum": ["test", "describe"]
|
||||
"enum": ["pass", "fail", "timeout", "skip", "todo"]
|
||||
}
|
||||
],
|
||||
"commands": [
|
||||
@@ -3063,18 +3058,6 @@
|
||||
"type": "string",
|
||||
"description": "Name of the test that started.",
|
||||
"optional": true
|
||||
},
|
||||
{
|
||||
"name": "type",
|
||||
"$ref": "TestType",
|
||||
"description": "Type of the item found (test or describe block).",
|
||||
"optional": true
|
||||
},
|
||||
{
|
||||
"name": "parentId",
|
||||
"type": "integer",
|
||||
"description": "ID of the parent describe block, if any.",
|
||||
"optional": true
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
5
packages/bun-internal-test/.gitignore
vendored
Normal file
5
packages/bun-internal-test/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
.DS_Store
|
||||
.env
|
||||
node_modules
|
||||
failing-tests.txt
|
||||
packages/
|
||||
47
packages/bun-internal-test/bun.lock
Normal file
47
packages/bun-internal-test/bun.lock
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"@actions/core": "latest",
|
||||
"p-queue": "^8.0.1",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/p-queue": "^3.2.1",
|
||||
"bun-types": "canary",
|
||||
"prettier": "^2.8.2",
|
||||
},
|
||||
},
|
||||
"runners/bun": {
|
||||
"name": "bun",
|
||||
},
|
||||
"runners/qunit": {
|
||||
"name": "qunit",
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@actions/core": ["@actions/core@1.10.1", "", { "dependencies": { "@actions/http-client": "^2.0.1", "uuid": "^8.3.2" } }, "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g=="],
|
||||
|
||||
"@actions/http-client": ["@actions/http-client@2.1.1", "", { "dependencies": { "tunnel": "^0.0.6" } }, "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw=="],
|
||||
|
||||
"@types/p-queue": ["@types/p-queue@3.2.1", "", { "dependencies": { "p-queue": "*" } }, "sha512-tgAdn5zEs05NuHzOyRM34cMO0rczStphR/kLo/ZJwwwJ5S2+QVxwA6gST3vDHWPB1oDfUuT6wOouhJvJkBCA0w=="],
|
||||
|
||||
"bun": ["bun@workspace:runners/bun"],
|
||||
|
||||
"bun-types": ["bun-types@1.0.4-canary.20230929T233451", "", {}, "sha512-Ke/y3GX0T2ZYKx0UKKCnFRRP9bXzVRcRZCrIlF5/aRNEpgbfrMBw+s5imPqrV2CqJ7q6kIbMikCahzMr4N9PTg=="],
|
||||
|
||||
"eventemitter3": ["eventemitter3@5.0.1", "", {}, "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="],
|
||||
|
||||
"p-queue": ["p-queue@8.0.1", "", { "dependencies": { "eventemitter3": "^5.0.1", "p-timeout": "^6.1.2" } }, "sha512-NXzu9aQJTAzbBqOt2hwsR63ea7yvxJc0PwN/zobNAudYfb1B7R08SzB4TsLeSbUCuG467NhnoT0oO6w1qRO+BA=="],
|
||||
|
||||
"p-timeout": ["p-timeout@6.1.2", "", {}, "sha512-UbD77BuZ9Bc9aABo74gfXhNvzC9Tx7SxtHSh1fxvx3jTLLYvmVhiQZZrJzqqU0jKbN32kb5VOKiLEQI/3bIjgQ=="],
|
||||
|
||||
"prettier": ["prettier@2.8.8", "", { "bin": { "prettier": "bin-prettier.js" } }, "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q=="],
|
||||
|
||||
"qunit": ["qunit@workspace:runners/qunit"],
|
||||
|
||||
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
|
||||
|
||||
"uuid": ["uuid@8.3.2", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="],
|
||||
}
|
||||
}
|
||||
22
packages/bun-internal-test/package.json
Normal file
22
packages/bun-internal-test/package.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun-internal-test",
|
||||
"type": "module",
|
||||
"workspaces": [
|
||||
"runners/bun",
|
||||
"runners/qunit"
|
||||
],
|
||||
"dependencies": {
|
||||
"@actions/core": "latest",
|
||||
"p-queue": "^8.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/p-queue": "^3.2.1",
|
||||
"bun-types": "canary",
|
||||
"prettier": "^2.8.2"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node src/runner.node.mjs",
|
||||
"test:ecosystem": "bun scripts/run-ecosystem-tests.ts"
|
||||
}
|
||||
}
|
||||
128
packages/bun-internal-test/resources/packages.json
Normal file
128
packages/bun-internal-test/resources/packages.json
Normal file
@@ -0,0 +1,128 @@
|
||||
[
|
||||
{
|
||||
"name": "lodash",
|
||||
"repository": {
|
||||
"github": "lodash/lodash"
|
||||
},
|
||||
"test": {
|
||||
"runner": "jest",
|
||||
"path": "test/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "express",
|
||||
"repository": {
|
||||
"github": "expressjs/express"
|
||||
},
|
||||
"test": {
|
||||
"runner": "jest",
|
||||
"path": "test/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "moment",
|
||||
"repository": {
|
||||
"github": "moment/moment"
|
||||
},
|
||||
"test": {
|
||||
"runner": "qunit",
|
||||
"path": "src/test/moment/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "underscore",
|
||||
"repository": {
|
||||
"github": "jashkenas/underscore"
|
||||
},
|
||||
"test": {
|
||||
"runner": "qunit",
|
||||
"path": "test/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "glob",
|
||||
"repository": {
|
||||
"github": "isaacs/node-glob"
|
||||
},
|
||||
"test": {
|
||||
"runner": "tap",
|
||||
"path": "test/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "uuid",
|
||||
"repository": {
|
||||
"github": "uuidjs/uuid"
|
||||
},
|
||||
"test": {
|
||||
"runner": "jest",
|
||||
"path": "test/unit/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "elysia",
|
||||
"repository": {
|
||||
"github": "elysiajs/elysia"
|
||||
},
|
||||
"test": {
|
||||
"runner": "bun",
|
||||
"path": "test/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "hono",
|
||||
"repository": {
|
||||
"github": "honojs/hono"
|
||||
},
|
||||
"test": {
|
||||
"runner": "bun",
|
||||
"env": {
|
||||
"NAME": "Bun"
|
||||
},
|
||||
"args": ["--jsx-import-source", "src/middleware/jsx/jsx-dev-runtime"],
|
||||
"path": "runtime_tests/bun/index.test.tsx"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "shumai",
|
||||
"repository": {
|
||||
"github": "facebookresearch/shumai"
|
||||
},
|
||||
"test": {
|
||||
"runner": "bun",
|
||||
"path": "test/",
|
||||
"skip": "TODO: handle shumai's external dependencies"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "zod",
|
||||
"repository": {
|
||||
"github": "colinhacks/zod"
|
||||
},
|
||||
"test": {
|
||||
"runner": "jest",
|
||||
"path": "src/__tests__/"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "fs-extra",
|
||||
"repository": {
|
||||
"github": "jprichardson/node-fs-extra"
|
||||
},
|
||||
"test": {
|
||||
"runner": "jest",
|
||||
"path": "lib/*/__tests__/*"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "graphql-yoga",
|
||||
"repository": {
|
||||
"github": "dotansimha/graphql-yoga"
|
||||
},
|
||||
"test": {
|
||||
"runner": "jest",
|
||||
"path": "packages/graphql-yoga/__tests__/",
|
||||
"skip": "TODO: bun install does not work"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,556 @@
|
||||
// Bun Snapshot v1, https://bun.com/docs/test/snapshots
|
||||
|
||||
exports[`runTests() can run all tests 1`] = `
|
||||
{
|
||||
"exitCode": 1,
|
||||
"files": [
|
||||
{
|
||||
"file": "path/to/example4.test.ts",
|
||||
"status": "fail",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 1,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 1,
|
||||
"tests": 1,
|
||||
"todo": 2,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should skip",
|
||||
"status": "skip",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should todo",
|
||||
"status": "todo",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"errors": [
|
||||
{
|
||||
"message": "expect(received).toBe(expected)\n\nExpected: false\nReceived: true\n",
|
||||
"name": "Error",
|
||||
"preview": "10 | test.todo(\"this should todo and fail\", () => {\n11 | expect(true).toBe(false);\n ^",
|
||||
"stack": [
|
||||
{
|
||||
"column": 12,
|
||||
"file": "path/to/example4.test.ts",
|
||||
"function": undefined,
|
||||
"line": 11,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "this should todo and fail",
|
||||
"status": "todo",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should todo and pass",
|
||||
"status": "fail",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 1,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 1,
|
||||
"tests": 1,
|
||||
"todo": 2,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTests() can run all tests 2`] = `
|
||||
{
|
||||
"exitCode": 0,
|
||||
"files": [
|
||||
{
|
||||
"file": "example2.spec.js",
|
||||
"status": "pass",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"name": "this should pass",
|
||||
"status": "pass",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTests() can run all tests 3`] = `
|
||||
{
|
||||
"exitCode": 1,
|
||||
"files": [
|
||||
{
|
||||
"file": "example3.test.mjs",
|
||||
"status": "fail",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 2,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 0,
|
||||
"tests": 2,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"errors": [
|
||||
{
|
||||
"message": "expect(received).toBe(expected)\n\nExpected: false\nReceived: true\n",
|
||||
"name": "Error",
|
||||
"preview": "1 | \n2 | import { test, expect } from \"bun:test\";\n3 | \n4 | test(\"this should fail\", () => {\n5 | expect(true).toBe(false);\n ^",
|
||||
"stack": [
|
||||
{
|
||||
"column": 8,
|
||||
"file": "example3.test.mjs",
|
||||
"function": undefined,
|
||||
"line": 5,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "this should fail",
|
||||
"status": "fail",
|
||||
},
|
||||
{
|
||||
"duration": 1,
|
||||
"errors": [
|
||||
{
|
||||
"message": "test \"this should timeout\" timed out after 1ms",
|
||||
"name": "Timeout",
|
||||
},
|
||||
],
|
||||
"name": "this should timeout",
|
||||
"status": "fail",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 2,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 0,
|
||||
"tests": 2,
|
||||
"todo": 0,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTests() can run all tests 4`] = `
|
||||
{
|
||||
"exitCode": 0,
|
||||
"files": [
|
||||
{
|
||||
"file": "example1.test.ts",
|
||||
"status": "pass",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 0,
|
||||
"tests": 0,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 0,
|
||||
"tests": 0,
|
||||
"todo": 0,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTests() can run all tests 5`] = `
|
||||
{
|
||||
"files": [
|
||||
{
|
||||
"file": "path/to/example4.test.ts",
|
||||
"status": "fail",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 1,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 1,
|
||||
"tests": 1,
|
||||
"todo": 2,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should skip",
|
||||
"status": "skip",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should todo",
|
||||
"status": "todo",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"errors": [
|
||||
{
|
||||
"message": "expect(received).toBe(expected)\n\nExpected: false\nReceived: true\n",
|
||||
"name": "Error",
|
||||
"preview": "10 | test.todo(\"this should todo and fail\", () => {\n11 | expect(true).toBe(false);\n ^",
|
||||
"stack": [
|
||||
{
|
||||
"column": 12,
|
||||
"file": "path/to/example4.test.ts",
|
||||
"function": undefined,
|
||||
"line": 11,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "this should todo and fail",
|
||||
"status": "todo",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should todo and pass",
|
||||
"status": "fail",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"file": "example2.spec.js",
|
||||
"status": "pass",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"name": "this should pass",
|
||||
"status": "pass",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"file": "example3.test.mjs",
|
||||
"status": "fail",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 2,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 0,
|
||||
"tests": 2,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"errors": [
|
||||
{
|
||||
"message": "expect(received).toBe(expected)\n\nExpected: false\nReceived: true\n",
|
||||
"name": "Error",
|
||||
"preview": "1 | \n2 | import { test, expect } from \"bun:test\";\n3 | \n4 | test(\"this should fail\", () => {\n5 | expect(true).toBe(false);\n ^",
|
||||
"stack": [
|
||||
{
|
||||
"column": 8,
|
||||
"file": "example3.test.mjs",
|
||||
"function": undefined,
|
||||
"line": 5,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "this should fail",
|
||||
"status": "fail",
|
||||
},
|
||||
{
|
||||
"duration": 1,
|
||||
"errors": [
|
||||
{
|
||||
"message": "test \"this should timeout\" timed out after 1ms",
|
||||
"name": "Timeout",
|
||||
},
|
||||
],
|
||||
"name": "this should timeout",
|
||||
"status": "fail",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"file": "example1.test.ts",
|
||||
"status": "pass",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 0,
|
||||
"skip": 0,
|
||||
"tests": 0,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 3,
|
||||
"files": 4,
|
||||
"pass": 1,
|
||||
"skip": 1,
|
||||
"tests": 4,
|
||||
"todo": 2,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTest() can run a test 1`] = `
|
||||
{
|
||||
"exitCode": 0,
|
||||
"files": [
|
||||
{
|
||||
"file": "example2.test.ts",
|
||||
"status": "pass",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"name": "this should pass",
|
||||
"status": "pass",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTest() can run a test with a symlink 1`] = `
|
||||
{
|
||||
"exitCode": 1,
|
||||
"files": [
|
||||
{
|
||||
"file": "example1.ts",
|
||||
"status": "fail",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 1,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 1,
|
||||
"tests": 2,
|
||||
"todo": 1,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"name": "this should pass",
|
||||
"status": "pass",
|
||||
},
|
||||
{
|
||||
"duration": 1,
|
||||
"errors": [
|
||||
{
|
||||
"message": "expect(received).toBe(expected)\n\nExpected: false\nReceived: true\n",
|
||||
"name": "Error",
|
||||
"preview": "4 | test(\"this should pass\", () => {\n5 | expect(true).toBe(true);\n6 | });\n7 | \n8 | test(\"this should fail\", () => {\n9 | expect(true).toBe(false);\n ^",
|
||||
"stack": [
|
||||
{
|
||||
"column": 8,
|
||||
"file": "example1.ts",
|
||||
"function": undefined,
|
||||
"line": 9,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "this should fail",
|
||||
"status": "fail",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should skip",
|
||||
"status": "skip",
|
||||
},
|
||||
{
|
||||
"duration": 0,
|
||||
"name": "this should todo",
|
||||
"status": "todo",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 1,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 1,
|
||||
"tests": 2,
|
||||
"todo": 1,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`runTest() can run a test with a preload 1`] = `
|
||||
{
|
||||
"exitCode": 0,
|
||||
"files": [
|
||||
{
|
||||
"file": "preload.test.ts",
|
||||
"status": "pass",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"duration": 1,
|
||||
"name": "test should have preloaded",
|
||||
"status": "pass",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"info": {
|
||||
"arch": undefined,
|
||||
"name": "bun test",
|
||||
"os": undefined,
|
||||
"revision": "",
|
||||
"version": "",
|
||||
},
|
||||
"stderr": "",
|
||||
"stdout": "",
|
||||
"summary": {
|
||||
"duration": 1,
|
||||
"fail": 0,
|
||||
"files": 1,
|
||||
"pass": 1,
|
||||
"skip": 0,
|
||||
"tests": 1,
|
||||
"todo": 0,
|
||||
},
|
||||
}
|
||||
`;
|
||||
5
packages/bun-internal-test/runners/bun/package.json
Normal file
5
packages/bun-internal-test/runners/bun/package.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"module": "runner.ts"
|
||||
}
|
||||
263
packages/bun-internal-test/runners/bun/runner.test.ts
Normal file
263
packages/bun-internal-test/runners/bun/runner.test.ts
Normal file
@@ -0,0 +1,263 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { mkdirSync, mkdtempSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import type { FindTestOptions, ParseTestResult, RunTestResult } from "./runner";
|
||||
import { bunSpawn, findTests, nodeSpawn, runTest, runTests } from "./runner";
|
||||
|
||||
describe("runTests()", () => {
|
||||
const cwd = createFs({
|
||||
"example1.test.ts": "",
|
||||
"example2.spec.js": `
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("this should pass", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
`,
|
||||
"example3.test.mjs": `
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("this should fail", () => {
|
||||
expect(true).toBe(false);
|
||||
});
|
||||
|
||||
test("this should timeout", async () => {
|
||||
await Bun.sleep(2);
|
||||
}, 1);
|
||||
`,
|
||||
"path": {
|
||||
"to": {
|
||||
"example4.test.ts": `
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test.skip("this should skip", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
test.todo("this should todo");
|
||||
|
||||
test.todo("this should todo and fail", () => {
|
||||
expect(true).toBe(false);
|
||||
});
|
||||
|
||||
test.todo("this should todo and pass", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
`,
|
||||
},
|
||||
},
|
||||
});
|
||||
test("can run all tests", async () => {
|
||||
const results = runTests({ cwd });
|
||||
while (true) {
|
||||
const { value, done } = await results.next();
|
||||
toMatchResult(value);
|
||||
if (done) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("runTest()", () => {
|
||||
const cwd = createFs({
|
||||
"example1.ts": `
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("this should pass", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
test("this should fail", () => {
|
||||
expect(true).toBe(false);
|
||||
});
|
||||
|
||||
test.skip("this should skip", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
test.todo("this should todo");
|
||||
`,
|
||||
"path": {
|
||||
"to": {
|
||||
"example2.test.ts": `
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("this should pass", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
`,
|
||||
},
|
||||
},
|
||||
"preload": {
|
||||
"preload.test.ts": `
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("test should have preloaded", () => {
|
||||
expect(globalThis.preload).toBe(true);
|
||||
});
|
||||
`,
|
||||
"preload.ts": `
|
||||
globalThis.preload = true;
|
||||
`,
|
||||
},
|
||||
});
|
||||
test("can run a test", async () => {
|
||||
const result = await runTest({
|
||||
cwd,
|
||||
path: "path/to/example2.ts",
|
||||
});
|
||||
toMatchResult(result);
|
||||
});
|
||||
test("can run a test with a symlink", async () => {
|
||||
const result = await runTest({
|
||||
cwd,
|
||||
path: "example1.ts",
|
||||
});
|
||||
toMatchResult(result);
|
||||
});
|
||||
test("can run a test with a preload", async () => {
|
||||
const result = await runTest({
|
||||
cwd,
|
||||
path: "preload/preload.test.ts",
|
||||
preload: ["./preload/preload.ts"],
|
||||
});
|
||||
toMatchResult(result);
|
||||
});
|
||||
});
|
||||
|
||||
function toMatchResult(result: ParseTestResult | RunTestResult): void {
|
||||
if (result.summary.duration) {
|
||||
result.summary.duration = 1;
|
||||
}
|
||||
result.info.revision = "";
|
||||
result.info.version = "";
|
||||
result.info.os = undefined;
|
||||
result.info.arch = undefined;
|
||||
for (const file of result.files) {
|
||||
if (file.summary.duration) {
|
||||
file.summary.duration = 1;
|
||||
}
|
||||
for (const test of file.tests) {
|
||||
if (test.duration) {
|
||||
test.duration = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
if ("stderr" in result) {
|
||||
result.stderr = "";
|
||||
result.stdout = "";
|
||||
}
|
||||
expect(result).toMatchSnapshot();
|
||||
}
|
||||
|
||||
describe("findTests()", () => {
|
||||
const cwd = createFs({
|
||||
"readme.md": "",
|
||||
"package.json": "",
|
||||
"path": {
|
||||
"to": {
|
||||
"example1.js": "",
|
||||
"example2.test.ts": "",
|
||||
"example3.spec.js": "",
|
||||
"example.txt": "",
|
||||
},
|
||||
"example4.js.map": "",
|
||||
"example4.js": "",
|
||||
"example5.test.ts": "",
|
||||
},
|
||||
});
|
||||
const find = (options: FindTestOptions = {}) => {
|
||||
const results = findTests({ cwd, ...options });
|
||||
return [...results].sort();
|
||||
};
|
||||
test("can find all tests", () => {
|
||||
const results = find();
|
||||
expect(results).toEqual([
|
||||
"path/example4.js",
|
||||
"path/example5.test.ts",
|
||||
"path/to/example1.js",
|
||||
"path/to/example2.test.ts",
|
||||
"path/to/example3.spec.js",
|
||||
]);
|
||||
});
|
||||
test("can find tests that match a directory", () => {
|
||||
const results = find({
|
||||
filters: ["path/to/"],
|
||||
});
|
||||
expect(results).toEqual(["path/to/example1.js", "path/to/example2.test.ts", "path/to/example3.spec.js"]);
|
||||
});
|
||||
test("can find tests that match a file", () => {
|
||||
const results = find({
|
||||
filters: ["example1.js", "example5.test.ts"],
|
||||
});
|
||||
expect(results).toEqual(["path/example5.test.ts", "path/to/example1.js"]);
|
||||
});
|
||||
test("can find tests that match a glob", () => {
|
||||
const results = find({
|
||||
filters: ["path/to/*.js", "*.spec.*"],
|
||||
});
|
||||
expect(results).toEqual(["path/to/example1.js", "path/to/example3.spec.js"]);
|
||||
});
|
||||
test("can find no tests", () => {
|
||||
const results = find({
|
||||
filters: ["path/to/nowhere/*"],
|
||||
});
|
||||
expect(results).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("bunSpawn()", () => {
|
||||
testSpawn(bunSpawn);
|
||||
});
|
||||
|
||||
describe("nodeSpawn()", () => {
|
||||
testSpawn(nodeSpawn);
|
||||
});
|
||||
|
||||
function testSpawn(spawn: typeof bunSpawn): void {
|
||||
test("can run a command", async () => {
|
||||
const { exitCode, stdout, stderr } = await spawn({
|
||||
cmd: "echo",
|
||||
args: ["hello world"],
|
||||
});
|
||||
expect(exitCode).toBe(0);
|
||||
expect(stdout).toBe("hello world\n");
|
||||
expect(stderr).toBe("");
|
||||
});
|
||||
test("can timeout a command", async () => {
|
||||
const { exitCode, stdout, stderr } = await spawn({
|
||||
cmd: "sleep",
|
||||
args: ["60"],
|
||||
timeout: 1,
|
||||
});
|
||||
expect(exitCode).toBe(null);
|
||||
expect(stdout).toBe("");
|
||||
expect(stderr).toBe("");
|
||||
});
|
||||
}
|
||||
|
||||
type FsTree = {
|
||||
[path: string]: FsTree | string;
|
||||
};
|
||||
|
||||
function createFs(tree: FsTree): string {
|
||||
let cwd = mkdtempSync(join(tmpdir(), "bun-internal-test-"));
|
||||
if (cwd.startsWith("/var/folders")) {
|
||||
cwd = join("/private", cwd); // HACK: macOS
|
||||
}
|
||||
const traverse = (tree: FsTree, path: string) => {
|
||||
for (const [name, content] of Object.entries(tree)) {
|
||||
const newPath = join(path, name);
|
||||
if (typeof content === "string") {
|
||||
writeFileSync(newPath, content);
|
||||
} else {
|
||||
mkdirSync(newPath);
|
||||
traverse(content, newPath);
|
||||
}
|
||||
}
|
||||
};
|
||||
traverse(tree, cwd);
|
||||
return cwd;
|
||||
}
|
||||
744
packages/bun-internal-test/runners/bun/runner.ts
Normal file
744
packages/bun-internal-test/runners/bun/runner.ts
Normal file
@@ -0,0 +1,744 @@
|
||||
// This file parses the output of `bun test` and outputs
|
||||
// a markdown summary and Github Action annotations.
|
||||
//
|
||||
// In the future, a version of this will be built-in to Bun.
|
||||
|
||||
import { spawn } from "node:child_process";
|
||||
import { fsyncSync, readdirSync, symlinkSync, unlinkSync, writeSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
export type TestInfo = {
|
||||
name: string;
|
||||
version: string;
|
||||
revision: string;
|
||||
os?: string;
|
||||
arch?: string;
|
||||
};
|
||||
|
||||
export type TestFile = {
|
||||
file: string;
|
||||
status: TestStatus;
|
||||
tests: Test[];
|
||||
summary: TestSummary;
|
||||
errors?: TestError[];
|
||||
};
|
||||
|
||||
export type TestError = {
|
||||
name: string;
|
||||
message: string;
|
||||
preview?: string;
|
||||
stack?: TestErrorStack[];
|
||||
};
|
||||
|
||||
export type TestErrorStack = {
|
||||
file: string;
|
||||
function?: string;
|
||||
line: number;
|
||||
column?: number;
|
||||
};
|
||||
|
||||
export type TestStatus = "pass" | "fail" | "skip" | "todo";
|
||||
|
||||
export type Test = {
|
||||
name: string;
|
||||
status: TestStatus;
|
||||
duration: number;
|
||||
errors?: TestError[];
|
||||
};
|
||||
|
||||
export type TestSummary = {
|
||||
pass: number;
|
||||
fail: number;
|
||||
skip: number;
|
||||
todo: number;
|
||||
tests: number;
|
||||
files: number;
|
||||
duration: number;
|
||||
};
|
||||
|
||||
export type RunTestsOptions = ParseTestOptions & {
|
||||
filters?: string[];
|
||||
preload?: string[];
|
||||
env?: Record<string, string>;
|
||||
args?: string[];
|
||||
timeout?: number;
|
||||
};
|
||||
|
||||
export async function* runTests(options: RunTestsOptions = {}): AsyncGenerator<RunTestResult, ParseTestResult> {
|
||||
const { cwd = process.cwd(), filters, timeout, preload, env, args } = options;
|
||||
const knownPaths = [...listFiles(cwd)];
|
||||
const paths = [...findTests({ cwd, knownPaths, filters })];
|
||||
if (!paths.length) {
|
||||
throw new Error(`No tests found; ${knownPaths.length} files did not match: ${filters}`);
|
||||
}
|
||||
const startTest = (path: string) =>
|
||||
runTest({
|
||||
cwd,
|
||||
path,
|
||||
knownPaths,
|
||||
preload,
|
||||
timeout,
|
||||
env,
|
||||
args,
|
||||
});
|
||||
const results: RunTestResult[] = [];
|
||||
const batchSize = 10;
|
||||
for (let i = 0; i < paths.length; i += batchSize) {
|
||||
for (const test of paths.slice(i, i + batchSize).map(startTest)) {
|
||||
const result = await test;
|
||||
results.push(result);
|
||||
yield result;
|
||||
}
|
||||
}
|
||||
return {
|
||||
info: results.map(result => result.info).pop()!,
|
||||
files: results.flatMap(result => result.files),
|
||||
summary: results
|
||||
.map(result => result.summary)
|
||||
.reduce((summary, result) => {
|
||||
summary.pass += result.pass;
|
||||
summary.fail += result.fail;
|
||||
summary.skip += result.skip;
|
||||
summary.todo += result.todo;
|
||||
summary.tests += result.tests;
|
||||
summary.files += result.files;
|
||||
summary.duration += result.duration;
|
||||
return summary;
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
export type RunTestOptions = ParseTestOptions & {
|
||||
path: string;
|
||||
preload?: string[];
|
||||
timeout?: number;
|
||||
env?: Record<string, string>;
|
||||
args?: string[];
|
||||
};
|
||||
|
||||
export type RunTestResult = ParseTestResult & {
|
||||
exitCode: number | null;
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
};
|
||||
|
||||
export async function runTest(options: RunTestOptions): Promise<RunTestResult> {
|
||||
const { cwd = process.cwd(), path, knownPaths, preload = [], timeout, env = {}, args = [] } = options;
|
||||
let file = path;
|
||||
if (!isTestJavaScript(file)) {
|
||||
const i = file.lastIndexOf(".");
|
||||
file = `${file.substring(0, i)}.test.${file.substring(i + 1)}`;
|
||||
try {
|
||||
symlinkSync(join(cwd, path), join(cwd, file));
|
||||
} catch {}
|
||||
}
|
||||
const { exitCode, stdout, stderr } = await bunSpawn({
|
||||
cwd,
|
||||
cmd: "bun",
|
||||
args: ["test", ...args, ...preload.flatMap(path => ["--preload", path]), file],
|
||||
env: {
|
||||
...process.env,
|
||||
...env,
|
||||
"FORCE_COLOR": "1",
|
||||
},
|
||||
timeout,
|
||||
});
|
||||
if (file !== path) {
|
||||
try {
|
||||
unlinkSync(join(cwd, file));
|
||||
} catch {}
|
||||
}
|
||||
const result = parseTest(stderr, { cwd, knownPaths });
|
||||
result.info.os ||= process.platform;
|
||||
result.info.arch ||= process.arch;
|
||||
if ("Bun" in globalThis && Bun.revision.startsWith(result.info.revision)) {
|
||||
result.info.revision = Bun.revision;
|
||||
}
|
||||
if (exitCode !== 0 && !result.summary.fail) {
|
||||
result.summary.fail = 1;
|
||||
result.files[0].summary.fail = 1;
|
||||
result.files[0].status = "fail";
|
||||
}
|
||||
return {
|
||||
exitCode,
|
||||
stdout,
|
||||
stderr,
|
||||
...result,
|
||||
};
|
||||
}
|
||||
|
||||
export function printTest(result: ParseTestResult | RunTestResult): void {
|
||||
const isAction = process.env["GITHUB_ACTIONS"] === "true";
|
||||
const isSingle = result.files.length === 1;
|
||||
if (isSingle) {
|
||||
const { file, status } = result.files[0];
|
||||
if (isAction) {
|
||||
printAnnotation("group", `${status.toUpperCase()} - ${file}`);
|
||||
} else {
|
||||
print(`\n${file}:\n`);
|
||||
}
|
||||
}
|
||||
if ("stderr" in result) {
|
||||
print(result.stderr);
|
||||
print(result.stdout);
|
||||
}
|
||||
if (!isAction) {
|
||||
print("\n");
|
||||
return;
|
||||
}
|
||||
result.files
|
||||
.filter(({ status }) => status === "fail")
|
||||
.flatMap(({ tests }) => tests)
|
||||
.filter(({ status }) => status === "fail")
|
||||
.flatMap(({ name: title, errors }) =>
|
||||
errors?.forEach(({ name, message, stack }) => {
|
||||
const { file, line } = stack?.[0] ?? {};
|
||||
if (is3rdParty(file)) {
|
||||
return;
|
||||
}
|
||||
printAnnotation("error", `${name}: ${message}`, {
|
||||
file,
|
||||
line,
|
||||
title,
|
||||
});
|
||||
}),
|
||||
);
|
||||
if (isSingle) {
|
||||
printAnnotation("endgroup");
|
||||
}
|
||||
}
|
||||
|
||||
function print(buffer: string | Uint8Array) {
|
||||
if (typeof buffer === "string") {
|
||||
buffer = new TextEncoder().encode(buffer);
|
||||
}
|
||||
let offset = 0;
|
||||
let length = buffer.byteLength;
|
||||
while (offset < length) {
|
||||
try {
|
||||
const n = writeSync(1, buffer);
|
||||
offset += n;
|
||||
if (offset < length) {
|
||||
try {
|
||||
fsyncSync(1);
|
||||
} catch {}
|
||||
buffer = buffer.slice(n);
|
||||
}
|
||||
} catch (error) {
|
||||
// @ts-ignore
|
||||
if (error.code === "EAGAIN") {
|
||||
continue;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: there is a bug that causes annotations to be duplicated
// Records every workflow-command line already emitted so printAnnotation()
// can de-duplicate repeats within a single process.
const annotations = new Set<string>();
|
||||
|
||||
function printAnnotation(type: string, arg?: string, args?: Record<string, unknown>): void {
|
||||
let line = `::${type}`;
|
||||
if (args) {
|
||||
line += " ";
|
||||
line += Object.entries(args)
|
||||
.map(([key, value]) => `${key}=${value}`)
|
||||
.join(",");
|
||||
}
|
||||
line += "::";
|
||||
if (arg) {
|
||||
line += arg;
|
||||
}
|
||||
line = line.replace(/\n/g, "%0A");
|
||||
if (annotations.has(line)) {
|
||||
return;
|
||||
}
|
||||
annotations.add(line);
|
||||
print(`\n${line}\n`);
|
||||
}
|
||||
|
||||
function is3rdParty(file?: string): boolean {
|
||||
return !file || file.startsWith("/") || file.includes(":") || file.includes("..") || file.includes("node_modules/");
|
||||
}
|
||||
|
||||
// Options accepted by parseTest().
export type ParseTestOptions = {
  // Working directory used to relativize absolute stack-frame paths.
  cwd?: string;
  // Repository-relative paths used to resolve file headers printed by bun.
  knownPaths?: string[];
};

// Structured report produced by parseTest() from `bun test` stderr.
export type ParseTestResult = {
  info: TestInfo;
  files: TestFile[];
  summary: TestSummary;
};
|
||||
|
||||
/**
 * Parses the stderr produced by `bun test` into a structured report.
 *
 * The parser is a line-oriented state machine: it waits for the version
 * banner, then classifies each line as a file header, error header, stack
 * frame, test result, skip-count marker, or the final summary.
 *
 * @param stderr raw (possibly ANSI-colored) output of `bun test`
 * @param options cwd for path relativization and knownPaths for resolution
 * @throws when no banner is found (the runner likely crashed before output)
 */
export function parseTest(stderr: string, options: ParseTestOptions = {}): ParseTestResult {
  const { cwd, knownPaths } = options;
  // Parse on the ANSI-stripped copy; the raw lines are kept but unused here.
  const linesAnsi = stderr.split("\n");
  const lines = linesAnsi.map(stripAnsi);
  let info: TestInfo | undefined;
  // Matches the banner, e.g. "bun test v1.0.0 (abcdef01)".
  const parseInfo = (line: string): TestInfo | undefined => {
    const match = /^(bun (?:wip)?test) v([0-9\.]+) \(([0-9a-z]+)\)$/.exec(line);
    if (!match) {
      return undefined;
    }
    const [, name, version, sha] = match;
    return {
      name,
      version,
      revision: sha,
    };
  };
  let files: TestFile[] = [];
  let file: TestFile | undefined;
  // A file header is a line like "path/to/file.test.ts:".  When possible the
  // short path is upgraded to a matching entry from knownPaths.
  const parseFile = (line: string): TestFile | undefined => {
    let file = line.slice(0, -1);
    if (!isJavaScript(file) || !line.endsWith(":")) {
      return undefined;
    }
    for (const path of knownPaths ?? []) {
      if (path.endsWith(file)) {
        file = path;
        break;
      }
    }
    return {
      file,
      tests: [],
      status: "pass",
      summary: {
        files: 1,
        tests: 0,
        pass: 0,
        fail: 0,
        skip: 0,
        todo: 0,
        duration: 0,
      },
    };
  };
  // A test-result line starts with a status icon — including the mojibake
  // variants "‚úì"/"‚úó" seen on mis-decoded output — then the test name and
  // an optional "[12.34ms]" duration suffix.
  const parseTestLine = (line: string): Test | undefined => {
    const match = /^(✓|‚úì|✗|‚úó|»|-|✎) (.*)$/.exec(line);
    if (!match) {
      return undefined;
    }
    const [, icon, name] = match;
    let status: TestStatus = "fail";
    switch (icon) {
      case "✓":
      case "‚úì":
        status = "pass";
        break;
      case "✗":
      case "‚úó":
        status = "fail";
        break;
      case "»":
      case "-":
        status = "skip";
        break;
      case "✎":
        status = "todo";
        break;
    }
    const match2 = /^(.*) \[([0-9]+\.[0-9]+)(m?s)\]$/.exec(name);
    if (!match2) {
      return {
        name,
        status,
        duration: 0,
      };
    }
    const [, title, duration, unit] = match2;
    return {
      name: title,
      status,
      // NOTE(review): multiplying by 1000 when the unit is "ms" looks
      // inverted relative to parseSummary() below (which scales "s" by
      // 1000 to get milliseconds) — confirm the intended duration unit.
      duration: parseFloat(duration ?? "0") * (unit === "ms" ? 1000 : 1) || 0,
    };
  };
  let errors: TestError[] = [];
  let error: TestError | undefined;
  // Matches error headers: "TypeError: ...", "error: ...", "timeout: ...".
  const parseError = (line: string): TestError | undefined => {
    const match = /^(.*error|timeout)\: (.*)$/i.exec(line);
    if (!match) {
      return undefined;
    }
    const [, name, message] = match;
    return {
      name: name === "error" ? "Error" : name,
      message,
    };
  };
  // Matches stack frames in both shapes:
  //   "  at func (path:line:col)"  and  "  at path:line:col".
  const parseErrorStack = (line: string): TestErrorStack | undefined => {
    let match = /^\s*at (.*) \((.*)\:([0-9]+)\:([0-9]+)\)$/.exec(line);
    if (!match) {
      match = /^\s*at (.*)\:([0-9]+)\:([0-9]+)$/.exec(line);
      if (!match) {
        return undefined;
      }
    }
    // Destructure from the end so both regex shapes line up; for the second
    // shape `func` receives the full match and is dropped by the comparison
    // with `line` below.
    const [columnNo, lineNo, path, func] = match.reverse();
    let file = path;
    if (cwd && path.startsWith(cwd)) {
      file = path.slice(cwd.length);
      if (file.startsWith("/")) {
        file = file.slice(1);
      }
    }
    return {
      file,
      function: func !== line ? func : undefined,
      line: parseInt(lineNo),
      column: parseInt(columnNo),
    };
  };
  // Source-excerpt lines printed above an error: caret lines and
  // "12 | expect(...)"-style code previews.
  const parseErrorPreview = (line: string): string | undefined => {
    if (line.endsWith("^") || /^[0-9]+ \| /.test(line)) {
      return line;
    }
    return undefined;
  };
  let summary: TestSummary | undefined;
  // Final line: "Ran N tests across M files. ... [1.23s]".
  const parseSummary = (line: string): TestSummary | undefined => {
    const match = /^Ran ([0-9]+) tests across ([0-9]+) files\. .* \[([0-9]+\.[0-9]+)(m?s)\]$/.exec(line);
    if (!match) {
      return undefined;
    }
    const [, tests, files, duration, unit] = match;
    return {
      pass: 0,
      fail: 0,
      skip: 0,
      todo: 0,
      tests: parseInt(tests),
      files: parseInt(files),
      // Normalize seconds to milliseconds.
      duration: parseFloat(duration) * (unit === "s" ? 1000 : 1),
    };
  };
  // Fallback when no summary line was printed: aggregate per-file data.
  const createSummary = (files: TestFile[]): TestSummary => {
    const summary = {
      pass: 0,
      fail: 0,
      skip: 0,
      todo: 0,
      tests: 0,
      files: 0,
      duration: 0,
    };
    for (const file of files) {
      summary.files++;
      summary.duration += file.summary.duration;
      for (const test of file.tests) {
        summary.tests++;
        summary[test.status]++;
      }
      // Errors that never attached to a test make the file count as failed.
      if (file.errors?.length) {
        summary.fail++;
      }
    }
    return summary;
  };
  // "N tests skipped:" (or failed/todo) is followed by N listing lines,
  // which the main loop jumps over to avoid re-parsing them as results.
  const parseSkip = (line: string): number => {
    const match = /^([0-9]+) tests (?:skipped|failed|todo)\:$/.exec(line);
    if (match) {
      return parseInt(match[1]);
    }
    return 0;
  };
  // At the end of a file section, errors with no owning test are attached
  // to the file itself.
  const endOfFile = (file?: TestFile): void => {
    if (file && !file.tests.length && errors.length) {
      file.errors = errors;
      errors = [];
    }
  };
  let errorStart = 0;
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    // Ignore everything before the banner line.
    if (!info && !(info = parseInfo(line))) {
      continue;
    }
    const newFile = parseFile(line);
    if (newFile) {
      endOfFile(file);
      files.push((file = newFile));
      continue;
    }
    const newError = parseError(line);
    if (newError) {
      errorStart = i;
      errors.push((error = newError));
      // Collect up to 7 preview lines printed immediately above the header,
      // preserving their original top-to-bottom order.
      for (let j = 1; j < 8 && i - j >= 0; j++) {
        const line = lines[i - j];
        const preview = parseErrorPreview(line);
        if (!preview) {
          break;
        }
        if (error.preview) {
          error.preview = preview + "\n" + error.preview;
        } else {
          error.preview = preview;
        }
      }
      continue;
    }
    const newStack = parseErrorStack(line);
    if (newStack) {
      if (error) {
        error.stack ||= [];
        error.stack.push(newStack);
        // On the first frame, fold the lines between the error header and
        // this frame into the error message (multi-line messages).
        for (let j = errorStart + 1; j < i && error.stack.length === 1; j++) {
          error.message += "\n" + lines[j];
        }
      } else {
        // TODO: newStack and !error
      }
      continue;
    }
    const newTest = parseTestLine(line);
    if (newTest) {
      if (error && newTest.status === "skip") {
        continue; // Likely a false positive from error message
      }
      if (error) {
        // The error had no stack; fold trailing lines into its message.
        for (let j = errorStart + 1; j < i - 1 && !error.stack?.length; j++) {
          error.message += "\n" + lines[j];
        }
        error = undefined;
      }
      // Pending errors belong to the test result that follows them.
      if (errors.length) {
        newTest.errors = errors;
        errors = [];
      }
      file!.tests.push(newTest);
      continue;
    }
    const newSummary = parseSummary(line);
    if (newSummary) {
      summary = newSummary;
      break;
    }
    i += parseSkip(line);
  }
  endOfFile(file);
  if (!info) {
    throw new Error("No tests found; did the test runner crash?");
  }
  summary ||= createSummary(files);
  const count = (status: TestStatus): number => {
    return files.reduce((n, file) => n + file.tests.filter(test => test.status === status).length, 0);
  };
  // parseSummary() leaves per-status counts at 0; fill them in here.
  summary.pass ||= count("pass");
  summary.fail ||= count("fail");
  summary.skip ||= count("skip");
  summary.todo ||= count("todo");
  const getStatus = (summary: TestSummary) => {
    return summary.fail ? "fail" : !summary.pass && summary.skip ? "skip" : "pass";
  };
  if (files.length === 1) {
    files[0].summary = { ...summary };
    files[0].status = getStatus(summary);
  } else {
    for (const file of files) {
      const summary = createSummary([file]);
      file.summary = summary;
      file.status = getStatus(summary);
    }
  }
  return {
    info,
    files,
    summary,
  };
}
|
||||
|
||||
function stripAnsi(string: string): string {
|
||||
return string.replace(/\x1b\[[0-9;]*m/g, "");
|
||||
}
|
||||
|
||||
// Options accepted by findTests().
export type FindTestOptions = {
  // Directory to scan when knownPaths is not supplied.
  cwd?: string;
  // Pre-computed candidate paths; skips the filesystem walk when provided.
  knownPaths?: string[];
  // Glob patterns, directory prefixes, file suffixes, or substrings to match.
  filters?: string[];
};
|
||||
|
||||
export function* findTests(options: FindTestOptions = {}): Generator<string> {
|
||||
const { cwd = process.cwd(), knownPaths, filters = [] } = options;
|
||||
const paths = knownPaths ?? listFiles(cwd);
|
||||
for (const path of paths) {
|
||||
if (!isJavaScript(path)) {
|
||||
continue;
|
||||
}
|
||||
let match = filters.length === 0;
|
||||
for (const filter of filters) {
|
||||
if (isGlob(filter)) {
|
||||
match = isGlobMatch(filter, path);
|
||||
} else if (filter.endsWith("/")) {
|
||||
match = path.startsWith(filter);
|
||||
} else if (isJavaScript(filter)) {
|
||||
match = path.endsWith(filter);
|
||||
} else {
|
||||
match = path.includes(filter);
|
||||
}
|
||||
if (match) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!match) {
|
||||
continue;
|
||||
}
|
||||
yield path;
|
||||
}
|
||||
}
|
||||
|
||||
function* listFiles(cwd: string, dir: string = ""): Generator<string> {
|
||||
const dirents = readdirSync(join(cwd, dir), { withFileTypes: true });
|
||||
for (const dirent of dirents) {
|
||||
const { name } = dirent;
|
||||
if (name === "node_modules" || name.startsWith(".")) {
|
||||
continue;
|
||||
}
|
||||
const path = join(dir, name);
|
||||
if (dirent.isDirectory()) {
|
||||
yield* listFiles(cwd, path);
|
||||
} else if (dirent.isFile()) {
|
||||
yield path;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isJavaScript(path: string): boolean {
|
||||
return /\.(c|m)?(t|j)sx?$/.test(path);
|
||||
}
|
||||
|
||||
function isTestJavaScript(path: string): boolean {
|
||||
return /\.(test|spec)\.(c|m)?(t|j)sx?$/.test(path);
|
||||
}
|
||||
|
||||
// A path is treated as a glob pattern when it contains a "*" wildcard.
function isGlob(path: string): boolean {
  return path.includes("*");
}
|
||||
|
||||
function isGlobMatch(glob: string, path: string): boolean {
|
||||
return new RegExp(`^${glob.replace(/\*/g, ".*")}$`).test(path);
|
||||
}
|
||||
|
||||
// Options shared by nodeSpawn() and bunSpawn().
export type SpawnOptions = {
  // Executable to run.
  cmd: string;
  // Arguments passed after cmd.
  args?: string[];
  // Working directory; defaults to the spawning process's cwd.
  cwd?: string;
  // Environment for the child (not merged with process.env here).
  env?: Record<string, string>;
  // Milliseconds before the child is killed.
  timeout?: number;
};

// Result shared by nodeSpawn() and bunSpawn().
export type SpawnResult = {
  // Exit code; null when the child was killed or failed to spawn.
  exitCode: number | null;
  stdout: string;
  stderr: string;
};
|
||||
|
||||
export async function nodeSpawn(options: SpawnOptions): Promise<SpawnResult> {
|
||||
const { cmd, args = [], cwd, env, timeout } = options;
|
||||
const subprocess = spawn(cmd, args, {
|
||||
cwd,
|
||||
env,
|
||||
timeout,
|
||||
stdio: "pipe",
|
||||
});
|
||||
let stderr = "";
|
||||
let stdout = "";
|
||||
subprocess.stdout.on("data", (data: Buffer) => {
|
||||
stdout += data.toString("utf-8");
|
||||
});
|
||||
subprocess.stderr.on("data", (data: Buffer) => {
|
||||
stderr += data.toString("utf-8");
|
||||
});
|
||||
const exitCode = await new Promise<number | null>(resolve => {
|
||||
subprocess.on("error", ({ name, message }) => {
|
||||
stderr += `${name}: ${message}`;
|
||||
resolve(null);
|
||||
});
|
||||
subprocess.on("exit", exitCode => {
|
||||
resolve(exitCode);
|
||||
});
|
||||
});
|
||||
return {
|
||||
exitCode,
|
||||
stdout,
|
||||
stderr,
|
||||
};
|
||||
}
|
||||
|
||||
export async function bunSpawn(options: SpawnOptions): Promise<SpawnResult> {
|
||||
const { cmd, args = [], cwd, env, timeout } = options;
|
||||
const subprocess = Bun.spawn({
|
||||
cwd,
|
||||
env,
|
||||
cmd: [cmd, ...args],
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
lazy: false,
|
||||
});
|
||||
const consume = async (stream?: ReadableStream) => {
|
||||
let result = "";
|
||||
const decoder = new TextDecoder();
|
||||
for await (const chunk of stream ?? []) {
|
||||
result += decoder.decode(chunk);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
const exitCode = await Promise.race([
|
||||
timeout ? Bun.sleep(timeout).then(() => null) : subprocess.exited,
|
||||
subprocess.exited,
|
||||
]);
|
||||
if (!subprocess.killed) {
|
||||
subprocess.kill();
|
||||
}
|
||||
const [stdout, stderr] = await Promise.all([consume(subprocess.stdout), consume(subprocess.stderr)]);
|
||||
return {
|
||||
exitCode,
|
||||
stdout,
|
||||
stderr,
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
let filters = [...process.argv.slice(2)];
|
||||
let timeout;
|
||||
let isolate;
|
||||
let quiet;
|
||||
for (let i = 0; i < filters.length; i++) {
|
||||
const filter = filters[i];
|
||||
if (filter.startsWith("--timeout=")) {
|
||||
timeout = parseInt(filter.split("=").pop()!);
|
||||
} else if (filter.startsWith("--isolate")) {
|
||||
isolate = true;
|
||||
} else if (filter.startsWith("--quiet")) {
|
||||
quiet = true;
|
||||
}
|
||||
}
|
||||
filters = filters.filter(filter => !filter.startsWith("--"));
|
||||
const results = runTests({
|
||||
filters,
|
||||
timeout,
|
||||
});
|
||||
let result;
|
||||
while (true) {
|
||||
const { value, done } = await results.next();
|
||||
if (done) {
|
||||
result = value;
|
||||
break;
|
||||
} else if (!quiet) {
|
||||
printTest(value);
|
||||
}
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
function isMain() {
|
||||
// @ts-ignore
|
||||
return import.meta.main || import.meta.url === `file://${process.argv[1]}`;
|
||||
}
|
||||
|
||||
// Run the CLI only when executed directly (not when imported as a library).
if (isMain()) {
  await main();
}
|
||||
233
packages/bun-internal-test/runners/qunit/assert.ts
Normal file
233
packages/bun-internal-test/runners/qunit/assert.ts
Normal file
@@ -0,0 +1,233 @@
|
||||
import type { BunExpect } from "bun-test";
|
||||
import type { Assert } from "./qunit.d";
|
||||
|
||||
export { $Assert as Assert };
|
||||
|
||||
class $Assert implements Assert {
|
||||
#$expect: BunExpect;
|
||||
#assertions = 0;
|
||||
#assertionsExpected: number | undefined;
|
||||
#asyncs = 0;
|
||||
#asyncsExpected: number | undefined;
|
||||
#promises: Promise<unknown>[] | undefined;
|
||||
#steps: string[] | undefined;
|
||||
#timeout: number | undefined;
|
||||
#abort: AbortController | undefined;
|
||||
|
||||
constructor(expect: BunExpect) {
|
||||
this.#$expect = expect;
|
||||
}
|
||||
|
||||
get #expect() {
|
||||
this.#assertions++;
|
||||
return this.#$expect;
|
||||
}
|
||||
|
||||
async(count?: number): () => void {
|
||||
const expected = Math.max(0, count ?? 1);
|
||||
if (this.#asyncsExpected === undefined) {
|
||||
this.#asyncsExpected = expected;
|
||||
} else {
|
||||
this.#asyncsExpected += expected;
|
||||
}
|
||||
let actual = 0;
|
||||
return () => {
|
||||
this.#asyncs++;
|
||||
if (actual++ > expected) {
|
||||
throw new Error(`Expected ${expected} calls to async(), but got ${actual} instead`);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
deepEqual<T>(actual: T, expected: T, message?: string): void {
|
||||
this.#expect(actual).toStrictEqual(expected);
|
||||
}
|
||||
|
||||
equal(actual: any, expected: any, message?: string): void {
|
||||
this.#expect(actual == expected).toBe(true);
|
||||
}
|
||||
|
||||
expect(amount: number): void {
|
||||
// If falsy, then the test can pass without any assertions.
|
||||
this.#assertionsExpected = Math.max(0, amount);
|
||||
}
|
||||
|
||||
false(state: any, message?: string): void {
|
||||
this.#expect(state).toBe(false);
|
||||
}
|
||||
|
||||
notDeepEqual(actual: any, expected: any, message?: string): void {
|
||||
this.#expect(actual).not.toStrictEqual(expected);
|
||||
}
|
||||
|
||||
notEqual(actual: any, expected: any, message?: string): void {
|
||||
this.#expect(actual == expected).toBe(false);
|
||||
}
|
||||
|
||||
notOk(state: any, message?: string): void {
|
||||
this.#expect(state).toBeFalsy();
|
||||
}
|
||||
|
||||
notPropContains(actual: any, expected: any, message?: string): void {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
notPropEqual(actual: any, expected: any, message?: string): void {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
notStrictEqual(actual: any, expected: any, message?: string): void {
|
||||
this.#expect(actual).not.toBe(expected);
|
||||
}
|
||||
|
||||
ok(state: any, message?: string): void {
|
||||
this.#expect(state).toBeTruthy();
|
||||
}
|
||||
|
||||
propContains(actual: any, expected: any, message?: string): void {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
propEqual(actual: any, expected: any, message?: string): void {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
pushResult(assertResult: { result: boolean; actual: any; expected: any; message?: string; source?: string }): void {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
async rejects(promise: unknown, expectedMatcher?: unknown, message?: unknown): Promise<void> {
|
||||
if (!(promise instanceof Promise)) {
|
||||
throw new Error(`Expected a promise, but got ${promise} instead`);
|
||||
}
|
||||
let passed = true;
|
||||
const result = promise
|
||||
.then(value => {
|
||||
passed = false;
|
||||
throw new Error(`Expected promise to reject, but it resolved with ${value}`);
|
||||
})
|
||||
.catch(error => {
|
||||
if (passed && expectedMatcher !== undefined) {
|
||||
// @ts-expect-error
|
||||
this.#$expect(() => {
|
||||
throw error;
|
||||
}).toThrow(expectedMatcher);
|
||||
}
|
||||
})
|
||||
.finally(() => {
|
||||
this.#assertions++;
|
||||
});
|
||||
if (this.#promises === undefined) {
|
||||
this.#promises = [result];
|
||||
} else {
|
||||
this.#promises.push(result);
|
||||
}
|
||||
}
|
||||
|
||||
timeout(duration: number): void {
|
||||
if (this.#timeout !== undefined) {
|
||||
clearTimeout(this.#timeout);
|
||||
}
|
||||
if (this.#abort === undefined) {
|
||||
this.#abort = new AbortController();
|
||||
}
|
||||
const error = new Error(`Test timed out after ${duration}ms`);
|
||||
const onAbort = () => {
|
||||
this.#abort!.abort(error);
|
||||
};
|
||||
hideFromStack(onAbort);
|
||||
this.#timeout = +setTimeout(onAbort, Math.max(0, duration));
|
||||
}
|
||||
|
||||
step(value: string): void {
|
||||
if (this.#steps) {
|
||||
this.#steps.push(value);
|
||||
} else {
|
||||
this.#steps = [value];
|
||||
}
|
||||
}
|
||||
|
||||
strictEqual<T>(actual: T, expected: T, message?: string): void {
|
||||
this.#expect(actual).toBe(expected);
|
||||
}
|
||||
|
||||
throws(block: () => void, expected?: any, message?: any): void {
|
||||
if (expected === undefined) {
|
||||
this.#expect(block).toThrow();
|
||||
} else {
|
||||
this.#expect(block).toThrow(expected);
|
||||
}
|
||||
}
|
||||
|
||||
raises(block: () => void, expected?: any, message?: any): void {
|
||||
if (expected === undefined) {
|
||||
this.#expect(block).toThrow();
|
||||
} else {
|
||||
this.#expect(block).toThrow(expected);
|
||||
}
|
||||
}
|
||||
|
||||
true(state: any, message?: string): void {
|
||||
this.#expect(state).toBe(true);
|
||||
}
|
||||
|
||||
verifySteps(steps: string[], message?: string): void {
|
||||
const actual = this.#steps ?? [];
|
||||
try {
|
||||
this.#expect(actual).toStrictEqual(steps);
|
||||
} finally {
|
||||
this.#steps = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async close(timeout: number): Promise<void> {
|
||||
const newError = (reason: string) => {
|
||||
const message = this.#abort?.signal?.aborted ? `${reason} (timed out after ${timeout}ms)` : reason;
|
||||
return new Error(message);
|
||||
};
|
||||
hideFromStack(newError);
|
||||
const assert = () => {
|
||||
if (this.#assertions === 0 && this.#assertionsExpected !== 0) {
|
||||
throw newError("Test completed without any assertions");
|
||||
}
|
||||
if (this.#assertionsExpected && this.#assertionsExpected !== this.#assertions) {
|
||||
throw newError(`Expected ${this.#assertionsExpected} assertions, but got ${this.#assertions} instead`);
|
||||
}
|
||||
if (this.#asyncsExpected && this.#asyncsExpected !== this.#asyncs) {
|
||||
throw newError(`Expected ${this.#asyncsExpected} calls to async(), but got ${this.#asyncs} instead`);
|
||||
}
|
||||
};
|
||||
hideFromStack(assert);
|
||||
if (this.#promises === undefined && this.#asyncsExpected === undefined) {
|
||||
assert();
|
||||
return;
|
||||
}
|
||||
if (this.#timeout === undefined) {
|
||||
this.timeout(timeout);
|
||||
}
|
||||
const { signal } = this.#abort!;
|
||||
const onTimeout = new Promise((_, reject) => {
|
||||
signal.onabort = () => {
|
||||
reject(signal.reason);
|
||||
};
|
||||
});
|
||||
await Promise.race([onTimeout, Promise.all(this.#promises ?? [])]);
|
||||
assert();
|
||||
}
|
||||
}
|
||||
|
||||
function hideFromStack(object: any): void {
|
||||
if (typeof object === "function") {
|
||||
Object.defineProperty(object, "name", {
|
||||
value: "::bunternal::",
|
||||
});
|
||||
return;
|
||||
}
|
||||
for (const name of Object.getOwnPropertyNames(object)) {
|
||||
Object.defineProperty(object[name], "name", {
|
||||
value: "::bunternal::",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Rename every Assert method so bun's reporter hides these frames from
// user-facing stack traces.
hideFromStack($Assert.prototype);
|
||||
5
packages/bun-internal-test/runners/qunit/package.json
Normal file
5
packages/bun-internal-test/runners/qunit/package.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "qunit",
|
||||
"module": "qunit.ts"
|
||||
}
|
||||
115
packages/bun-internal-test/runners/qunit/qunit.d.ts
vendored
Normal file
115
packages/bun-internal-test/runners/qunit/qunit.d.ts
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
export type Fn = (assert: Assert) => Promise<void> | void;
|
||||
export type TestFn = (name: string, fn?: Fn) => void;
|
||||
export type EachFn = (assert: Assert, value: unknown) => Promise<void> | void;
|
||||
export type TestEachFn = (name: string, data: DataInit, fn?: EachFn) => void;
|
||||
export type TestOrEachFn = TestFn & { each: TestEachFn };
|
||||
export type ModuleFn = (name: string, hooks?: Hooks | HooksFn, fn?: HooksFn) => void;
|
||||
|
||||
/**
|
||||
* @link https://api.qunitjs.com/
|
||||
*/
|
||||
export type QUnit = {
|
||||
start(): void;
|
||||
config: {
|
||||
[key: string]: unknown;
|
||||
};
|
||||
test: TestOrEachFn & {
|
||||
skip: TestOrEachFn;
|
||||
todo: TestOrEachFn;
|
||||
only: TestOrEachFn;
|
||||
};
|
||||
skip: TestFn;
|
||||
todo: TestFn;
|
||||
only: TestFn;
|
||||
module: ModuleFn & {
|
||||
skip: ModuleFn;
|
||||
todo: ModuleFn;
|
||||
only: ModuleFn;
|
||||
};
|
||||
hooks: {
|
||||
beforeEach(fn: Fn): void;
|
||||
afterEach(fn: Fn): void;
|
||||
};
|
||||
assert: Assert;
|
||||
begin(fn: UnknownFn): void;
|
||||
done(fn: UnknownFn): void;
|
||||
log(fn: UnknownFn): void;
|
||||
moduleDone(fn: UnknownFn): void;
|
||||
moduleStart(fn: UnknownFn): void;
|
||||
on(fn: UnknownFn): void;
|
||||
testDone(fn: UnknownFn): void;
|
||||
testStart(fn: UnknownFn): void;
|
||||
extend(target: unknown, mixin: unknown): unknown;
|
||||
push(result: ResultInit): void;
|
||||
stack(offset?: number): string;
|
||||
onUncaughtException(fn: ErrorFn): void;
|
||||
equiv(a: unknown, b: unknown): boolean;
|
||||
dump: {
|
||||
maxDepth: number;
|
||||
parse(value: unknown): string;
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @link https://api.qunitjs.com/QUnit/module/#options-object
|
||||
*/
|
||||
export type Hooks = {
|
||||
before?: Fn;
|
||||
beforeEach?: Fn;
|
||||
after?: Fn;
|
||||
afterEach?: Fn;
|
||||
};
|
||||
|
||||
export type NestedHooks = {
|
||||
before: (fn: Fn) => void;
|
||||
beforeEach: (fn: Fn) => void;
|
||||
after: (fn: Fn) => void;
|
||||
afterEach: (fn: Fn) => void;
|
||||
};
|
||||
|
||||
export type HooksFn = (hooks: NestedHooks) => void;
|
||||
|
||||
/**
|
||||
* @link https://api.qunitjs.com/assert/
|
||||
*/
|
||||
export type Assert = {
|
||||
async(count?: number): EmptyFn;
|
||||
deepEqual(actual: unknown, expected: unknown, message?: string): void;
|
||||
equal(actual: unknown, expected: unknown, message?: string): void;
|
||||
expect(count: number): void;
|
||||
false(actual: unknown, message?: string): void;
|
||||
notDeepEqual(actual: unknown, expected: unknown, message?: string): void;
|
||||
notEqual(actual: unknown, expected: unknown, message?: string): void;
|
||||
notOk(actual: unknown, message?: string): void;
|
||||
notPropContains(actual: unknown, prop: string, expected: unknown, message?: string): void;
|
||||
notPropEqual(actual: unknown, prop: string, expected: unknown, message?: string): void;
|
||||
notStrictEqual(actual: unknown, expected: unknown, message?: string): void;
|
||||
ok(actual: unknown, message?: string): void;
|
||||
propContains(actual: unknown, prop: string, expected: unknown, message?: string): void;
|
||||
propEqual(actual: unknown, prop: string, expected: unknown, message?: string): void;
|
||||
pushResult(result: ResultInit): void;
|
||||
rejects(promise: Promise<unknown>, expected?: ErrorInit, message?: string): Promise<void>;
|
||||
step(message: string): void;
|
||||
strictEqual(actual: unknown, expected: unknown, message?: string): void;
|
||||
throws(fn: () => unknown, expected?: ErrorInit, message?: string): void;
|
||||
timeout(ms: number): void;
|
||||
true(actual: unknown, message?: string): void;
|
||||
verifySteps(steps: string[], message?: string): void;
|
||||
};
|
||||
|
||||
export type ResultInit = {
|
||||
result: boolean;
|
||||
actual: unknown;
|
||||
expected: unknown;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
export type DataInit = unknown[] | Record<string, unknown>;
|
||||
|
||||
export type ErrorInit = Error | string | RegExp | ErrorConstructor;
|
||||
|
||||
export type EmptyFn = () => void;
|
||||
|
||||
export type ErrorFn = (error?: unknown) => void;
|
||||
|
||||
export type UnknownFn = (...args: unknown[]) => unknown;
|
||||
239
packages/bun-internal-test/runners/qunit/qunit.test.ts
Normal file
239
packages/bun-internal-test/runners/qunit/qunit.test.ts
Normal file
@@ -0,0 +1,239 @@
|
||||
import { QUnit } from "qunit";
|
||||
|
||||
const { module, test } = QUnit;
|
||||
const { todo, skip } = test;
|
||||
|
||||
const pass = test;
|
||||
const fail = test;
|
||||
|
||||
module("assert.async()", () => {
|
||||
pass("1 complete task", assert => {
|
||||
const done = assert.async();
|
||||
done();
|
||||
});
|
||||
pass("2 complete tasks", assert => {
|
||||
const done1 = assert.async();
|
||||
const done2 = assert.async(2);
|
||||
done1();
|
||||
done2();
|
||||
done2();
|
||||
});
|
||||
fail("1 incomplete task", assert => {
|
||||
const done = assert.async(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.deepEqual()", () => {
|
||||
pass("equal objects", assert => {
|
||||
assert.deepEqual({ a: 1, b: { c: "d" } }, { a: 1, b: { c: "d" } });
|
||||
assert.deepEqual([1, 2, "three"], [1, 2, "three"]);
|
||||
});
|
||||
fail("unequal objects", assert => {
|
||||
assert.deepEqual({ a: 1, b: "d" }, { a: 1, b: { c: "d" } });
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.equal()", () => {
|
||||
pass("equal values", assert => {
|
||||
assert.equal(1, 1);
|
||||
assert.equal(1, "1");
|
||||
assert.equal(0, "");
|
||||
});
|
||||
fail("unequal values", assert => {
|
||||
assert.equal(null, false);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.expect()", () => {
|
||||
pass("no assertions", assert => {
|
||||
assert.expect(0);
|
||||
});
|
||||
pass("expected number of assertions", assert => {
|
||||
assert.expect(1);
|
||||
assert.ok(true);
|
||||
});
|
||||
fail("unexpected number of assertions", assert => {
|
||||
assert.expect(3);
|
||||
assert.ok(true);
|
||||
assert.ok(true);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.false()", () => {
|
||||
pass("false", assert => {
|
||||
assert.false(false);
|
||||
});
|
||||
fail("falsey", assert => {
|
||||
assert.false(0);
|
||||
});
|
||||
fail("true", assert => {
|
||||
assert.false(true);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.notDeepEqual()", () => {
|
||||
pass("unequal objects", assert => {
|
||||
assert.notDeepEqual({ a: 1, b: "d" }, { a: 1, b: { c: "d" } });
|
||||
});
|
||||
fail("equal objects", assert => {
|
||||
assert.notDeepEqual({ a: 1, b: { c: "d" } }, { a: 1, b: { c: "d" } });
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.notEqual()", () => {
|
||||
pass("unequal values", assert => {
|
||||
assert.notEqual(null, false);
|
||||
});
|
||||
fail("equal values", assert => {
|
||||
assert.notEqual(1, 1);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.notOk()", () => {
|
||||
pass("false", assert => {
|
||||
assert.notOk(false);
|
||||
});
|
||||
pass("falsey", assert => {
|
||||
assert.notOk("");
|
||||
});
|
||||
fail("truthy", assert => {
|
||||
assert.notOk(1);
|
||||
});
|
||||
});
|
||||
|
||||
module.todo("assert.notPropContains()");
|
||||
|
||||
todo("assert.notPropEqual()");
|
||||
|
||||
module("assert.notStrictEqual()", () => {
|
||||
pass("unequal values", assert => {
|
||||
assert.notStrictEqual(1, "1");
|
||||
});
|
||||
fail("equal values", assert => {
|
||||
assert.notStrictEqual(1, 1);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.ok()", () => {
|
||||
pass("true", assert => {
|
||||
assert.ok(true);
|
||||
});
|
||||
pass("truthy", assert => {
|
||||
assert.ok(1);
|
||||
});
|
||||
fail("false", assert => {
|
||||
assert.ok(false);
|
||||
});
|
||||
fail("falsey", assert => {
|
||||
assert.ok("");
|
||||
});
|
||||
});
|
||||
|
||||
module.todo("assert.propContains()");
|
||||
|
||||
module.todo("assert.propEqual()");
|
||||
|
||||
module.todo("assert.pushResult()");
|
||||
|
||||
module("assert.rejects()", () => {
|
||||
skip("rejected promise", assert => {
|
||||
assert.rejects(Promise.reject()); // segfault?
|
||||
});
|
||||
pass("rejected promise", assert => {
|
||||
assert.rejects(Promise.reject(new Error("foo")), new Error("foo"));
|
||||
assert.rejects(Promise.reject(new TypeError("foo")), TypeError);
|
||||
assert.rejects(Promise.reject(new Error("foo")), "foo");
|
||||
assert.rejects(Promise.reject(new Error("foo")), /foo/);
|
||||
});
|
||||
fail("resolved promise", assert => {
|
||||
assert.rejects(Promise.resolve());
|
||||
});
|
||||
fail("rejected promise with unexpected error", assert => {
|
||||
assert.rejects(Promise.reject(new Error("foo")), "bar");
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.step()", () => {
|
||||
pass("correct steps", assert => {
|
||||
assert.step("foo");
|
||||
assert.step("bar");
|
||||
assert.verifySteps(["foo", "bar"]);
|
||||
});
|
||||
fail("incorrect steps", assert => {
|
||||
assert.step("foo");
|
||||
assert.verifySteps(["bar"]);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.strictEqual()", () => {
|
||||
pass("equal values", assert => {
|
||||
assert.strictEqual(1, 1);
|
||||
});
|
||||
fail("unequal values", assert => {
|
||||
assert.strictEqual(1, "1");
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.throws()", () => {
|
||||
pass("thrown error", assert => {
|
||||
assert.throws(() => {
|
||||
throw new Error("foo");
|
||||
}, new Error("foo"));
|
||||
assert.throws(() => {
|
||||
throw new TypeError("foo");
|
||||
}, TypeError);
|
||||
assert.throws(() => {
|
||||
throw new Error("foo");
|
||||
}, "foo");
|
||||
assert.throws(() => {
|
||||
throw new Error("foo");
|
||||
}, /foo/);
|
||||
});
|
||||
fail("no error thrown", assert => {
|
||||
assert.throws(() => {});
|
||||
});
|
||||
fail("unexpected error thrown", assert => {
|
||||
assert.throws(() => {
|
||||
throw new Error("foo");
|
||||
}, "bar");
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.timeout()", () => {
|
||||
pass("no timeout", assert => {
|
||||
assert.timeout(0);
|
||||
});
|
||||
fail("early timeout", assert => {
|
||||
const done = assert.async();
|
||||
assert.timeout(1);
|
||||
setTimeout(done, 2);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.true()", () => {
|
||||
pass("true", assert => {
|
||||
assert.true(true);
|
||||
});
|
||||
fail("truthy", assert => {
|
||||
assert.true(1);
|
||||
});
|
||||
fail("false", assert => {
|
||||
assert.true(false);
|
||||
});
|
||||
});
|
||||
|
||||
module("assert.verifySteps()", () => {
|
||||
pass("correct steps", assert => {
|
||||
assert.step("foo");
|
||||
assert.verifySteps(["foo"]);
|
||||
assert.step("bar");
|
||||
assert.verifySteps(["bar"]);
|
||||
assert.verifySteps([]);
|
||||
});
|
||||
fail("incorrect steps", assert => {
|
||||
assert.step("foo");
|
||||
assert.verifySteps(["foo", "bar"]);
|
||||
assert.step("bar");
|
||||
});
|
||||
});
|
||||
327
packages/bun-internal-test/runners/qunit/qunit.ts
Normal file
327
packages/bun-internal-test/runners/qunit/qunit.ts
Normal file
@@ -0,0 +1,327 @@
|
||||
import { deepEquals, inspect } from "bun";
|
||||
import type { TestContext } from "bun-test";
|
||||
import { Assert } from "./assert";
|
||||
import type { DataInit, EachFn, Fn, Hooks, HooksFn, ModuleFn, TestEachFn, TestFn, TestOrEachFn } from "./qunit.d";
|
||||
|
||||
type Status = "todo" | "skip" | "only" | undefined;
|
||||
|
||||
type Module = {
|
||||
name: string;
|
||||
status: Status;
|
||||
before: Fn[];
|
||||
beforeEach: Fn[];
|
||||
afterEach: Fn[];
|
||||
after: Fn[];
|
||||
addHooks(hooks?: Hooks | HooksFn): void;
|
||||
addTest(name: string, status: Status, fn?: Fn): void;
|
||||
addTests(name: string, status: Status, data: DataInit, fn?: EachFn): void;
|
||||
};
|
||||
|
||||
function newModule(context: TestContext, moduleName: string, moduleStatus?: Status): Module {
|
||||
const before: Fn[] = [];
|
||||
const beforeEach: Fn[] = [];
|
||||
const afterEach: Fn[] = [];
|
||||
const after: Fn[] = [];
|
||||
let tests = 0;
|
||||
const addTest = (name: string, status: Status, fn?: Fn) => {
|
||||
const runTest = async () => {
|
||||
if (fn === undefined) {
|
||||
return;
|
||||
}
|
||||
const assert = new Assert(context.expect);
|
||||
if (tests++ === 1) {
|
||||
for (const fn of before) {
|
||||
await fn(assert);
|
||||
}
|
||||
}
|
||||
for (const fn of beforeEach) {
|
||||
await fn(assert);
|
||||
}
|
||||
try {
|
||||
await fn(assert);
|
||||
} finally {
|
||||
for (const fn of afterEach) {
|
||||
await fn(assert);
|
||||
}
|
||||
// TODO: need a way to know when module is done
|
||||
if (false) {
|
||||
for (const fn of after) {
|
||||
await fn(assert);
|
||||
}
|
||||
}
|
||||
// TODO: configurable timeout
|
||||
await assert.close(100);
|
||||
}
|
||||
};
|
||||
hideFromStack(runTest);
|
||||
const addTest = () => {
|
||||
if (moduleStatus !== undefined) {
|
||||
status = moduleStatus;
|
||||
}
|
||||
if (status === undefined) {
|
||||
context.test(name, runTest);
|
||||
} else if (status === "skip" || status === "todo") {
|
||||
context.test.skip(name, runTest);
|
||||
} else {
|
||||
context.test.only(name, runTest);
|
||||
}
|
||||
};
|
||||
hideFromStack(addTest);
|
||||
if (moduleName) {
|
||||
context.describe(moduleName, addTest);
|
||||
} else {
|
||||
addTest();
|
||||
}
|
||||
};
|
||||
hideFromStack(addTest);
|
||||
if (moduleStatus === "skip" || moduleStatus === "todo") {
|
||||
context.test.skip(moduleName, () => {});
|
||||
}
|
||||
return {
|
||||
name: moduleName,
|
||||
status: moduleStatus,
|
||||
before,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
after,
|
||||
addHooks(hooks) {
|
||||
if (hooks === undefined) {
|
||||
return;
|
||||
}
|
||||
if (typeof hooks === "object") {
|
||||
if (hooks.before !== undefined) {
|
||||
before.push(hooks.before);
|
||||
}
|
||||
if (hooks.beforeEach !== undefined) {
|
||||
beforeEach.push(hooks.beforeEach);
|
||||
}
|
||||
if (hooks.afterEach !== undefined) {
|
||||
afterEach.push(hooks.afterEach);
|
||||
}
|
||||
if (hooks.after !== undefined) {
|
||||
after.push(hooks.after);
|
||||
}
|
||||
} else {
|
||||
hooks({
|
||||
before(fn) {
|
||||
before.push(fn);
|
||||
},
|
||||
beforeEach(fn) {
|
||||
beforeEach.push(fn);
|
||||
},
|
||||
afterEach(fn) {
|
||||
afterEach.push(fn);
|
||||
},
|
||||
after(fn) {
|
||||
after.push(fn);
|
||||
},
|
||||
});
|
||||
}
|
||||
},
|
||||
addTest,
|
||||
addTests(name, status, data, fn) {
|
||||
let entries: [string, unknown][];
|
||||
if (Array.isArray(data)) {
|
||||
entries = data.map(value => [inspect(value), value]);
|
||||
} else {
|
||||
entries = Object.entries(data);
|
||||
}
|
||||
for (const [key, value] of entries) {
|
||||
context.describe(name, () => {
|
||||
addTest(key, status, fn ? assert => fn(assert, value) : undefined);
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
hideFromStack(newModule);
|
||||
|
||||
function hideFromStack(object: any): void {
|
||||
if (typeof object === "function") {
|
||||
Object.defineProperty(object, "name", {
|
||||
value: "::bunternal::",
|
||||
});
|
||||
return;
|
||||
}
|
||||
for (const name of Object.getOwnPropertyNames(object)) {
|
||||
Object.defineProperty(object[name], "name", {
|
||||
value: "::bunternal::",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function todo(name: string) {
|
||||
const todo = () => {
|
||||
throw new Error(`Not implemented: QUnit.${name}`);
|
||||
};
|
||||
hideFromStack(todo);
|
||||
return todo;
|
||||
}
|
||||
|
||||
function newCallable<C, O>(callable: C, object: O): C & O {
|
||||
// @ts-expect-error
|
||||
return Object.assign(callable, object);
|
||||
}
|
||||
|
||||
function newQUnit(context: TestContext): import("./qunit.d").QUnit {
|
||||
let module: Module = newModule(context, "");
|
||||
let modules: Module[] = [module];
|
||||
const addModule = (name: string, status?: Status, hooks?: Hooks | HooksFn, fn?: HooksFn) => {
|
||||
module = newModule(context, name, status);
|
||||
modules.push(module);
|
||||
module.addHooks(hooks);
|
||||
module.addHooks(fn);
|
||||
};
|
||||
hideFromStack(addModule);
|
||||
return {
|
||||
assert: Assert.prototype,
|
||||
hooks: {
|
||||
beforeEach(fn) {
|
||||
for (const module of modules) {
|
||||
module.beforeEach.push(fn);
|
||||
}
|
||||
},
|
||||
afterEach(fn) {
|
||||
for (const module of modules) {
|
||||
module.afterEach.push(fn);
|
||||
}
|
||||
},
|
||||
},
|
||||
start() {},
|
||||
module: newCallable<
|
||||
ModuleFn,
|
||||
{
|
||||
skip: ModuleFn;
|
||||
todo: ModuleFn;
|
||||
only: ModuleFn;
|
||||
}
|
||||
>(
|
||||
(name, hooks, fn) => {
|
||||
addModule(name, undefined, hooks, fn);
|
||||
},
|
||||
{
|
||||
skip(name, hooks, fn) {
|
||||
addModule(name, "skip", hooks, fn);
|
||||
},
|
||||
todo(name, hooks, fn) {
|
||||
addModule(name, "todo", hooks, fn);
|
||||
},
|
||||
only(name, hooks, fn) {
|
||||
addModule(name, "only", hooks, fn);
|
||||
},
|
||||
},
|
||||
),
|
||||
test: newCallable<
|
||||
TestFn,
|
||||
{
|
||||
each: TestEachFn;
|
||||
skip: TestOrEachFn;
|
||||
todo: TestOrEachFn;
|
||||
only: TestOrEachFn;
|
||||
}
|
||||
>(
|
||||
(name, fn) => {
|
||||
module.addTest(name, undefined, fn);
|
||||
},
|
||||
{
|
||||
each: (name, data, fn) => {
|
||||
module.addTests(name, undefined, data, fn);
|
||||
},
|
||||
skip: newCallable<
|
||||
TestFn,
|
||||
{
|
||||
each: TestEachFn;
|
||||
}
|
||||
>(
|
||||
(name, fn) => {
|
||||
module.addTest(name, "skip", fn);
|
||||
},
|
||||
{
|
||||
each(name, data, fn) {
|
||||
module.addTests(name, "skip", data, fn);
|
||||
},
|
||||
},
|
||||
),
|
||||
todo: newCallable<
|
||||
TestFn,
|
||||
{
|
||||
each: TestEachFn;
|
||||
}
|
||||
>(
|
||||
(name, fn) => {
|
||||
module.addTest(name, "todo", fn);
|
||||
},
|
||||
{
|
||||
each(name, data, fn) {
|
||||
module.addTests(name, "todo", data, fn);
|
||||
},
|
||||
},
|
||||
),
|
||||
only: newCallable<
|
||||
TestFn,
|
||||
{
|
||||
each: TestEachFn;
|
||||
}
|
||||
>(
|
||||
(name, fn) => {
|
||||
module.addTest(name, "only", fn);
|
||||
},
|
||||
{
|
||||
each(name, data, fn) {
|
||||
module.addTests(name, "only", data, fn);
|
||||
},
|
||||
},
|
||||
),
|
||||
},
|
||||
),
|
||||
skip(name, fn) {
|
||||
module.addTest(name, "skip", fn);
|
||||
},
|
||||
todo(name, fn) {
|
||||
module.addTest(name, "todo", fn);
|
||||
},
|
||||
only(name, fn) {
|
||||
module.addTest(name, "only", fn);
|
||||
},
|
||||
dump: {
|
||||
maxDepth: Infinity,
|
||||
parse(data) {
|
||||
return inspect(data);
|
||||
},
|
||||
},
|
||||
extend(target: any, mixin) {
|
||||
return Object.assign(target, mixin);
|
||||
},
|
||||
equiv(a, b) {
|
||||
return deepEquals(a, b);
|
||||
},
|
||||
config: {},
|
||||
testDone: todo("testDone"),
|
||||
testStart: todo("testStart"),
|
||||
moduleDone: todo("moduleDone"),
|
||||
moduleStart: todo("moduleStart"),
|
||||
begin: todo("begin"),
|
||||
done: todo("done"),
|
||||
log: todo("log"),
|
||||
onUncaughtException: todo("onUncaughtException"),
|
||||
push: todo("push"),
|
||||
stack: todo("stack"),
|
||||
on: todo("on"),
|
||||
};
|
||||
}
|
||||
|
||||
const { expect, describe, test, beforeAll, beforeEach, afterEach, afterAll } = Bun.jest(import.meta.path);
|
||||
|
||||
export const QUnit = newQUnit({
|
||||
expect,
|
||||
describe,
|
||||
test,
|
||||
beforeAll,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
afterAll,
|
||||
});
|
||||
export { Assert };
|
||||
|
||||
// @ts-expect-error
|
||||
globalThis.QUnit = QUnit;
|
||||
335
packages/bun-internal-test/runners/tap/index.ts
Normal file
335
packages/bun-internal-test/runners/tap/index.ts
Normal file
@@ -0,0 +1,335 @@
|
||||
// Not working yet, WIP
|
||||
|
||||
import { callerSourceOrigin } from "bun:jsc";
|
||||
|
||||
type EventEmitter = import("node:events").EventEmitter;
|
||||
type Expect = (value: unknown) => import("bun:test").Expect;
|
||||
type Fn = () => unknown;
|
||||
type Future = Promise<unknown> | (() => Promise<unknown>);
|
||||
type Extra = {
|
||||
[key: string | number | symbol]: unknown;
|
||||
todo?: boolean | string;
|
||||
skip?: boolean | string;
|
||||
};
|
||||
|
||||
export function test(name: string, options?: Extra, fn?: (t: Tap) => unknown): Promise<void> {
|
||||
// @ts-expect-error
|
||||
const { expect } = Bun.jest(callerSourceOrigin());
|
||||
const tap = new Tap({
|
||||
expect: expect,
|
||||
name,
|
||||
context: {},
|
||||
parent: null,
|
||||
before: [],
|
||||
after: [],
|
||||
});
|
||||
return tap.test(name, options, fn);
|
||||
}
|
||||
|
||||
/**
|
||||
* @link https://node-tap.org/docs/api/
|
||||
*/
|
||||
class Tap {
|
||||
#_expect: Expect;
|
||||
|
||||
#name: string;
|
||||
#context: unknown;
|
||||
|
||||
#parent: Tap | null;
|
||||
#children: Tap[];
|
||||
|
||||
#before: Fn[];
|
||||
#beforeEach: Fn[];
|
||||
#after: Fn[];
|
||||
#afterEach: Fn[];
|
||||
|
||||
#abort: AbortController;
|
||||
#aborted: Promise<void>;
|
||||
#timeout: number | null;
|
||||
#passing: boolean;
|
||||
#plan: number | null;
|
||||
#count: number;
|
||||
|
||||
constructor({
|
||||
name,
|
||||
context,
|
||||
parent,
|
||||
before,
|
||||
after,
|
||||
expect,
|
||||
}: {
|
||||
name?: string;
|
||||
context?: unknown;
|
||||
parent?: Tap | null;
|
||||
before?: Fn[];
|
||||
after?: Fn[];
|
||||
expect: Expect;
|
||||
}) {
|
||||
this.#_expect = expect;
|
||||
this.#name = name ?? "";
|
||||
this.#context = context ?? {};
|
||||
this.#parent = parent ?? null;
|
||||
this.#children = [];
|
||||
this.#before = before ? [...before] : [];
|
||||
this.#beforeEach = [];
|
||||
this.#after = after ? [...after] : [];
|
||||
this.#afterEach = [];
|
||||
this.#abort = new AbortController();
|
||||
this.#aborted = new Promise(resolve => {
|
||||
this.#abort.signal.addEventListener("abort", () => resolve());
|
||||
});
|
||||
this.#timeout = null;
|
||||
this.#passing = true;
|
||||
this.#plan = null;
|
||||
this.#count = 0;
|
||||
}
|
||||
|
||||
get name(): string {
|
||||
return this.#name;
|
||||
}
|
||||
|
||||
get context(): unknown {
|
||||
return this.#context;
|
||||
}
|
||||
|
||||
set context(value: unknown) {
|
||||
this.#context = value;
|
||||
}
|
||||
|
||||
get passing(): boolean {
|
||||
return this.#passing;
|
||||
}
|
||||
|
||||
#expect(value: unknown) {
|
||||
this.#count++;
|
||||
return this.#_expect(value);
|
||||
}
|
||||
|
||||
async test(name: string, options?: Extra, fn?: (t: Tap) => unknown): Promise<void> {
|
||||
if (typeof options === "function") {
|
||||
fn = options;
|
||||
options = {};
|
||||
}
|
||||
if (fn === undefined) {
|
||||
throw new Error("Missing test function");
|
||||
}
|
||||
const test = new Tap({
|
||||
expect: this.#_expect,
|
||||
name,
|
||||
context: this.#context,
|
||||
parent: this,
|
||||
before: [...this.#before, ...this.#beforeEach],
|
||||
after: [...this.#after, ...this.#afterEach],
|
||||
});
|
||||
this.#children.push(test);
|
||||
try {
|
||||
for (const fn of this.#before) {
|
||||
fn();
|
||||
}
|
||||
await fn(test);
|
||||
} catch (error) {
|
||||
test.#passing = false;
|
||||
test.#abort.abort(error);
|
||||
}
|
||||
}
|
||||
|
||||
async todo(name: string, options?: Extra, fn?: (t: Tap) => unknown): Promise<void> {
|
||||
console.warn("TODO", name);
|
||||
}
|
||||
|
||||
async skip(name: string, options?: Extra, fn?: (t: Tap) => unknown): Promise<void> {
|
||||
console.warn("SKIP", name);
|
||||
}
|
||||
|
||||
beforeEach(fn: Fn): void {
|
||||
this.#beforeEach.push(fn);
|
||||
}
|
||||
|
||||
afterEach(fn: Fn): void {
|
||||
this.#afterEach.push(fn);
|
||||
}
|
||||
|
||||
before(fn: Fn): void {
|
||||
this.#before.push(fn);
|
||||
}
|
||||
|
||||
teardown(fn: Fn): void {
|
||||
this.#after.push(fn);
|
||||
}
|
||||
|
||||
setTimeout(timeout: number): void {
|
||||
if (timeout === 0) {
|
||||
if (this.#timeout !== null) {
|
||||
clearTimeout(this.#timeout);
|
||||
}
|
||||
} else {
|
||||
const fn = () => {
|
||||
this.#abort.abort(new Error("Timed out"));
|
||||
};
|
||||
this.#timeout = +setTimeout(fn, timeout);
|
||||
}
|
||||
}
|
||||
|
||||
pragma(options: Record<string, unknown>): void {
|
||||
throw new TODO("pragma");
|
||||
}
|
||||
|
||||
plan(count: number, comment?: string): void {
|
||||
if (this.#plan !== null) {
|
||||
throw new Error("Plan already set");
|
||||
}
|
||||
this.#plan = count;
|
||||
}
|
||||
|
||||
pass(message?: string, extra?: Extra): void {
|
||||
// TODO
|
||||
}
|
||||
|
||||
fail(message?: string, extra?: Extra): void {
|
||||
// TODO
|
||||
}
|
||||
|
||||
end(): void {
|
||||
if (this.#abort.signal.aborted) {
|
||||
throw new Error("Test already ended");
|
||||
}
|
||||
this.#abort.abort();
|
||||
}
|
||||
|
||||
endAll(): void {
|
||||
for (const child of this.#children) {
|
||||
child.endAll();
|
||||
}
|
||||
this.end();
|
||||
}
|
||||
|
||||
autoend(value: boolean): void {
|
||||
throw new TODO("autoend");
|
||||
}
|
||||
|
||||
bailout(reason?: string): void {
|
||||
throw new TODO("bailout");
|
||||
}
|
||||
|
||||
ok(value: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(value).toBeTruthy();
|
||||
}
|
||||
|
||||
notOk(value: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(value).toBeFalsy();
|
||||
}
|
||||
|
||||
error(value: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(value).toBeInstanceOf(Error);
|
||||
}
|
||||
|
||||
async emits(eventEmitter: EventEmitter, event: string, message?: string, extra?: Extra): Promise<void> {
|
||||
throw new TODO("emits");
|
||||
}
|
||||
|
||||
async rejects(value: Future, expectedError?: Error, message?: string, extra?: Extra): Promise<void> {
|
||||
throw new TODO("rejects");
|
||||
}
|
||||
|
||||
async resolves(value: Future, message?: string, extra?: Extra): Promise<void> {
|
||||
throw new TODO("resolves");
|
||||
}
|
||||
|
||||
async resolveMatch(value: Future, expected: unknown, message?: string, extra?: Extra): Promise<void> {
|
||||
throw new TODO("resolveMatch");
|
||||
}
|
||||
|
||||
async resolveMatchSnapshot(value: Future, message?: string, extra?: Extra): Promise<void> {
|
||||
throw new TODO("resolveMatchSnapshot");
|
||||
}
|
||||
|
||||
throws(fn: Fn, expectedError?: Error, message?: string, extra?: Extra): void {
|
||||
this.#expect(fn).toThrow(expectedError);
|
||||
}
|
||||
|
||||
doesNotThrow(fn: Fn, message?: string, extra?: Extra): void {
|
||||
throw new TODO("doesNotThrow");
|
||||
}
|
||||
|
||||
expectUncaughtException(expectedError?: Error, message?: string, extra?: Extra): void {
|
||||
throw new TODO("expectUncaughtException");
|
||||
}
|
||||
|
||||
equal(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(actual).toBe(expected);
|
||||
}
|
||||
|
||||
not(expected: unknown, actual: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(actual).not.toBe(expected);
|
||||
}
|
||||
|
||||
same(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(actual).toEqual(expected);
|
||||
}
|
||||
|
||||
notSame(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(actual).not.toEqual(expected);
|
||||
}
|
||||
|
||||
strictSame(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(actual).toStrictEqual(expected);
|
||||
}
|
||||
|
||||
strictNotSame(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
this.#expect(actual).not.toStrictEqual(expected);
|
||||
}
|
||||
|
||||
hasStrict(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("hasStrict");
|
||||
}
|
||||
|
||||
notHasStrict(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("notHasStrict");
|
||||
}
|
||||
|
||||
has(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("has");
|
||||
}
|
||||
|
||||
notHas(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("notHas");
|
||||
}
|
||||
|
||||
hasProp(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("hasProp");
|
||||
}
|
||||
|
||||
hasProps(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("hasProps");
|
||||
}
|
||||
|
||||
hasOwnProp(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("hasOwnProp");
|
||||
}
|
||||
|
||||
hasOwnProps(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("hasOwnProps");
|
||||
}
|
||||
|
||||
match(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("match");
|
||||
}
|
||||
|
||||
notMatch(actual: unknown, expected: unknown, message?: string, extra?: Extra): void {
|
||||
throw new TODO("notMatch");
|
||||
}
|
||||
|
||||
type(actual: unknown, type: string, message?: string, extra?: Extra): void {
|
||||
const types = ["string", "number", "boolean", "object", "function", "undefined", "symbol", "bigint"];
|
||||
if (type in types) {
|
||||
return this.#expect(typeof actual).toBe(type);
|
||||
}
|
||||
this.#expect(actual?.constructor?.name).toBe(type);
|
||||
}
|
||||
}
|
||||
|
||||
class TODO extends Error {
|
||||
constructor(message?: string) {
|
||||
super(message);
|
||||
}
|
||||
}
|
||||
78
packages/bun-internal-test/scripts/html.ts
Normal file
78
packages/bun-internal-test/scripts/html.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { escapeHTML } from "bun";
|
||||
|
||||
export function table(headers: unknown[], rows: unknown[][]): string {
|
||||
return (
|
||||
"<table>" +
|
||||
headers.reduce((html, header) => html + `<th>${header}</th>`, "<tr>") +
|
||||
"</tr>" +
|
||||
rows.reduce((html, row) => html + row.reduce((html, cell) => html + `<td>${cell}</td>`, "<tr>") + "</tr>", "") +
|
||||
"</table>"
|
||||
);
|
||||
}
|
||||
|
||||
export function h(level: number, content: string): string {
|
||||
return `<h${level}>${content}</h${level}>`;
|
||||
}
|
||||
|
||||
export function ul(items: unknown[]): string {
|
||||
return items.reduce((html, item) => html + `<li>${item}</li>`, "<ul>") + "</ul>";
|
||||
}
|
||||
|
||||
export function a(content: string, baseUrl?: string, url?: string): string {
|
||||
const href = baseUrl && url ? new URL(url, baseUrl).toString() : baseUrl;
|
||||
return href ? `<a href="${href}">${escape(content)}</a>` : escape(content);
|
||||
}
|
||||
|
||||
export function br(n: number = 1): string {
|
||||
return "<br/>".repeat(n);
|
||||
}
|
||||
|
||||
export function details(summary: string, details: string): string {
|
||||
return `<details><summary>${summary}</summary>${details}</details>`;
|
||||
}
|
||||
|
||||
export function code(content: string, lang: string = ""): string {
|
||||
return `<pre lang="${lang}"><code>${escape(content)}</code></pre>`;
|
||||
}
|
||||
|
||||
export function escape(content: string): string {
|
||||
return escapeHTML(content).replace(/\+/g, "+").replace(/\-/g, "-").replace(/\*/g, "*");
|
||||
}
|
||||
|
||||
export function percent(numerator: number, demonimator: number): number {
|
||||
const percent = Math.floor((numerator / demonimator) * 100);
|
||||
if (isNaN(percent) || percent < 0) {
|
||||
return 0;
|
||||
}
|
||||
if (percent >= 100) {
|
||||
return numerator >= demonimator ? 100 : 99;
|
||||
}
|
||||
return percent;
|
||||
}
|
||||
|
||||
export function count(n: number): string {
|
||||
return n ? `${n}` : "";
|
||||
}
|
||||
|
||||
export function duration(milliseconds: number): string {
|
||||
if (milliseconds === 0) {
|
||||
return "";
|
||||
}
|
||||
if (milliseconds < 1000) {
|
||||
return `${Math.ceil(milliseconds)} ms`;
|
||||
}
|
||||
const seconds = Math.floor(milliseconds / 1000);
|
||||
const minutes = Math.floor(seconds / 60);
|
||||
const hours = Math.floor(minutes / 60);
|
||||
let result = [];
|
||||
if (hours) {
|
||||
result.push(`${hours}h`);
|
||||
}
|
||||
if (minutes) {
|
||||
result.push(`${minutes % 60}m`);
|
||||
}
|
||||
if (seconds) {
|
||||
result.push(`${seconds % 60}s`);
|
||||
}
|
||||
return result.join(" ");
|
||||
}
|
||||
154
packages/bun-internal-test/scripts/run-bun-tests.ts
Normal file
154
packages/bun-internal-test/scripts/run-bun-tests.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
import { a, br, code, count, duration, h, table, ul } from "html";
|
||||
import { appendFileSync } from "node:fs";
|
||||
import { basename, resolve } from "node:path";
|
||||
import { TestError, TestStatus, printTest, runTests } from "runner";
|
||||
|
||||
const cwd = resolve(import.meta.dir, "..", "..", "..", "test");
|
||||
const filters = process.argv.slice(2); // TODO
|
||||
|
||||
let result;
|
||||
const tests = runTests({
|
||||
cwd,
|
||||
filters: ["*.test.ts", "*.test.js", "*.test.cjs", "*.test.mjs", "*.test.jsx", "*.test.tsx"],
|
||||
env: {
|
||||
// "BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
timeout: 30_000,
|
||||
});
|
||||
|
||||
while (true) {
|
||||
const { value, done } = await tests.next();
|
||||
if (done) {
|
||||
result = value;
|
||||
break;
|
||||
} else {
|
||||
printTest(value);
|
||||
}
|
||||
}
|
||||
|
||||
const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
|
||||
const outputPath = process.env["GITHUB_OUTPUT"];
|
||||
if (summaryPath) {
|
||||
const server = process.env["GITHUB_SERVER_URL"] ?? "https://github.com";
|
||||
const repository = process.env["GITHUB_REPOSITORY"] ?? "oven-sh/bun";
|
||||
const baseUrl = `${server}/${repository}/tree/${result.info.revision}/test/`;
|
||||
|
||||
let failures: string = "";
|
||||
let summaries: string[][] = [];
|
||||
let totalSummary = [
|
||||
icon("pass") + " " + result.summary.pass,
|
||||
icon("fail") + " " + result.summary.fail,
|
||||
icon("skip") + " " + result.summary.skip,
|
||||
icon("todo") + " " + result.summary.todo,
|
||||
duration(result.summary.duration),
|
||||
];
|
||||
|
||||
const sortedFiles = result.files.sort((a, b) => {
|
||||
if (a.status === b.status) {
|
||||
return a.file.localeCompare(b.file);
|
||||
}
|
||||
const order = {
|
||||
fail: 10,
|
||||
pass: 0,
|
||||
skip: -1,
|
||||
todo: -2,
|
||||
};
|
||||
return order[b.status] - order[a.status];
|
||||
});
|
||||
|
||||
for (const { file, status, summary } of sortedFiles) {
|
||||
summaries.push([
|
||||
a(basename(file), baseUrl, file),
|
||||
icon(status),
|
||||
count(summary.pass),
|
||||
count(summary.fail),
|
||||
count(summary.skip),
|
||||
count(summary.todo),
|
||||
duration(summary.duration),
|
||||
]);
|
||||
}
|
||||
|
||||
const failedFiles = sortedFiles.filter(({ status }) => status === "fail");
|
||||
|
||||
for (const { file, tests, errors } of failedFiles) {
|
||||
const testErrors: TestError[] = [];
|
||||
|
||||
if (errors?.length) {
|
||||
testErrors.push(...errors);
|
||||
}
|
||||
for (const { errors } of tests) {
|
||||
if (errors?.length) {
|
||||
testErrors.push(...errors);
|
||||
}
|
||||
}
|
||||
|
||||
const failedTests = tests.filter(({ status }) => status === "fail");
|
||||
|
||||
const lines: string[] = [];
|
||||
for (const { name, errors } of failedTests) {
|
||||
let line = a(name, link(baseUrl, file, errors));
|
||||
if (!errors?.length) {
|
||||
lines.push(line);
|
||||
continue;
|
||||
}
|
||||
line += br(2);
|
||||
for (const error of errors) {
|
||||
line += preview(error);
|
||||
}
|
||||
lines.push(line);
|
||||
}
|
||||
|
||||
failures += h(3, a(file, link(baseUrl, file, testErrors)));
|
||||
failures += ul(lines);
|
||||
}
|
||||
|
||||
let summary =
|
||||
h(2, "Summary") +
|
||||
table(["Passed", "Failed", "Skipped", "Todo", "Duration"], [totalSummary]) +
|
||||
table(["File", "Status", "Passed", "Failed", "Skipped", "Todo", "Duration"], summaries) +
|
||||
h(2, "Errors") +
|
||||
failures;
|
||||
appendFileSync(summaryPath, summary, "utf-8");
|
||||
|
||||
if (outputPath && failedFiles.length) {
|
||||
appendFileSync(outputPath, `\nfailing_tests_count=${failedFiles.length}`, "utf-8");
|
||||
const rng = Math.ceil(Math.random() * 10_000);
|
||||
const value = failedFiles.map(({ file }) => ` - \`${file}\``).join("\n");
|
||||
appendFileSync(outputPath, `\nfailing_tests<<${rng}\n${value}\n${rng}`, "utf-8");
|
||||
}
|
||||
}
|
||||
|
||||
function icon(status: TestStatus) {
|
||||
switch (status) {
|
||||
case "pass":
|
||||
return "✅";
|
||||
case "fail":
|
||||
return "❌";
|
||||
case "skip":
|
||||
return "⏭️";
|
||||
case "todo":
|
||||
return "📝";
|
||||
}
|
||||
}
|
||||
|
||||
function link(baseUrl: string, fileName: string, errors?: TestError[]): string {
|
||||
const url = new URL(fileName, baseUrl);
|
||||
loop: for (const { stack } of errors ?? []) {
|
||||
for (const location of stack ?? []) {
|
||||
if (location.file.endsWith(fileName)) {
|
||||
url.hash = `L${location.line}`;
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
function preview(error: TestError): string {
|
||||
const { name, message, preview } = error;
|
||||
let result = code(`${name}: ${message}`, "diff");
|
||||
if (preview) {
|
||||
result += code(preview, "typescript");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
177
packages/bun-internal-test/scripts/run-ecosystem-tests.ts
Normal file
177
packages/bun-internal-test/scripts/run-ecosystem-tests.ts
Normal file
@@ -0,0 +1,177 @@
|
||||
import { a, br, code, count, details, duration, h, percent, table, ul } from "html";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { appendFileSync, existsSync, readFileSync } from "node:fs";
|
||||
import { Test, TestError, TestFile, TestStatus, TestSummary, printTest, runTests } from "runner";
|
||||
|
||||
const [filter] = process.argv.slice(2);
|
||||
const packagesText = readFileSync(resolve("resources/packages.json"), "utf8");
|
||||
const packagesList: Package[] = JSON.parse(packagesText);
|
||||
const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
|
||||
|
||||
type Package = {
|
||||
name: string;
|
||||
repository: {
|
||||
github: string;
|
||||
commit?: string;
|
||||
};
|
||||
test?: {
|
||||
runner: "bun" | "jest" | "qunit" | "mocha" | "tap";
|
||||
path: string;
|
||||
skip?: boolean | string;
|
||||
env?: Record<string, string>;
|
||||
args?: string[];
|
||||
};
|
||||
};
|
||||
|
||||
let summary = h(2, "Summary");
|
||||
let summaries: string[][] = [];
|
||||
let errors = h(2, "Errors");
|
||||
|
||||
for (const pkg of packagesList) {
|
||||
const { name, test } = pkg;
|
||||
if (filter && !name.includes(filter)) {
|
||||
continue;
|
||||
}
|
||||
const cwd = gitClone(pkg);
|
||||
if (!test || test.skip) {
|
||||
continue;
|
||||
}
|
||||
const { runner, path, args, env } = test;
|
||||
const preload: string[] = [];
|
||||
if (runner === "qunit") {
|
||||
preload.push(resolve("runners/qunit/qunit.ts"));
|
||||
}
|
||||
if (runner === "tap" || runner === "mocha") {
|
||||
continue; // TODO
|
||||
}
|
||||
const tests = runTests({
|
||||
cwd,
|
||||
filters: [path],
|
||||
preload,
|
||||
args,
|
||||
env,
|
||||
timeout: 5000,
|
||||
});
|
||||
let result;
|
||||
while (true) {
|
||||
const { value, done } = await tests.next();
|
||||
if (done) {
|
||||
result = value;
|
||||
break;
|
||||
} else if (filter || value.summary.fail) {
|
||||
printTest(value);
|
||||
}
|
||||
}
|
||||
if (!summaryPath) {
|
||||
continue;
|
||||
}
|
||||
const { summary, files } = result;
|
||||
const baseUrl = htmlUrl(pkg);
|
||||
summaries.push([
|
||||
a(name, baseUrl),
|
||||
htmlStatus(summary),
|
||||
count(summary.pass),
|
||||
count(summary.fail),
|
||||
count(summary.skip),
|
||||
duration(summary.duration),
|
||||
]);
|
||||
let breakdown = "";
|
||||
const isFailed = ({ status }: { status: TestStatus }) => status === "fail";
|
||||
for (const file of files.filter(isFailed)) {
|
||||
breakdown += h(3, a(file.file, htmlLink(baseUrl, file)));
|
||||
for (const error of file.errors ?? []) {
|
||||
breakdown += htmlError(error);
|
||||
}
|
||||
let entries: string[] = [];
|
||||
for (const test of file.tests.filter(isFailed)) {
|
||||
let entry = a(test.name, htmlLink(baseUrl, file, test));
|
||||
if (!test.errors?.length) {
|
||||
entries.push(entry);
|
||||
continue;
|
||||
}
|
||||
entry += br(2);
|
||||
for (const error of test.errors) {
|
||||
entry += htmlError(error);
|
||||
}
|
||||
entries.push(entry);
|
||||
}
|
||||
if (!entries.length && !file.errors?.length) {
|
||||
breakdown += code("Test failed, but no errors were found.");
|
||||
} else {
|
||||
breakdown += ul(entries);
|
||||
}
|
||||
}
|
||||
if (breakdown) {
|
||||
errors += details(a(name, baseUrl), breakdown);
|
||||
}
|
||||
}
|
||||
|
||||
if (summaryPath) {
|
||||
let html = summary + table(["Package", "Status", "Passed", "Failed", "Skipped", "Duration"], summaries) + errors;
|
||||
appendFileSync(summaryPath, html, "utf-8");
|
||||
}
|
||||
|
||||
function htmlLink(baseUrl: string, file: TestFile, test?: Test): string {
|
||||
const url = new URL(file.file, baseUrl);
|
||||
const errors = (test ? test.errors : file.errors) ?? [];
|
||||
loop: for (const { stack } of errors) {
|
||||
for (const location of stack ?? []) {
|
||||
if (test || location.file.endsWith(file.file)) {
|
||||
url.hash = `L${location.line}`;
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
function htmlStatus(summary: TestSummary): string {
|
||||
const ratio = percent(summary.pass, summary.tests);
|
||||
if (ratio >= 95) {
|
||||
return `✅ ${ratio}%`;
|
||||
}
|
||||
if (ratio >= 75) {
|
||||
return `⚠️ ${ratio}%`;
|
||||
}
|
||||
return `❌ ${ratio}%`;
|
||||
}
|
||||
|
||||
function htmlError(error: TestError): string {
|
||||
const { name, message, preview } = error;
|
||||
let result = code(`${name}: ${message}`, "diff");
|
||||
if (preview) {
|
||||
result += code(preview, "typescript");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function htmlUrl(pkg: Package): string {
|
||||
const { repository } = pkg;
|
||||
const { github, commit } = repository;
|
||||
return `https://github.com/${github}/tree/${commit}/`;
|
||||
}
|
||||
|
||||
function gitClone(pkg: Package): string {
|
||||
const { name, repository } = pkg;
|
||||
const path = resolve(`packages/${name}`);
|
||||
if (!existsSync(path)) {
|
||||
const url = `https://github.com/${repository.github}.git`;
|
||||
spawnSync("git", ["clone", "--single-branch", "--depth=1", url, path], {
|
||||
stdio: "inherit",
|
||||
});
|
||||
spawnSync("bun", ["install"], {
|
||||
cwd: path,
|
||||
stdio: "inherit",
|
||||
});
|
||||
}
|
||||
const { stdout } = spawnSync("git", ["rev-parse", "HEAD"], {
|
||||
cwd: path,
|
||||
stdio: "pipe",
|
||||
});
|
||||
repository.commit = stdout.toString().trim();
|
||||
return path;
|
||||
}
|
||||
|
||||
function resolve(path: string): string {
|
||||
return new URL(`../${path}`, import.meta.url).pathname;
|
||||
}
|
||||
606
packages/bun-internal-test/src/runner.node.mjs
Normal file
606
packages/bun-internal-test/src/runner.node.mjs
Normal file
@@ -0,0 +1,606 @@
|
||||
import * as action from "@actions/core";
|
||||
import { spawn, spawnSync } from "child_process";
|
||||
import { closeSync, mkdirSync, openSync, readFileSync, rmSync, writeFileSync } from "fs";
|
||||
import { readdirSync } from "node:fs";
|
||||
import { basename, resolve } from "node:path";
|
||||
import { cpus, hostname, tmpdir, totalmem, userInfo } from "os";
|
||||
import PQueue from "p-queue";
|
||||
import { join, normalize, posix, relative } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const run_start = new Date();
|
||||
const TIMEOUT_DURATION = 1000 * 60 * 5;
|
||||
const SHORT_TIMEOUT_DURATION = Math.ceil(TIMEOUT_DURATION / 5);
|
||||
|
||||
function defaultConcurrency() {
|
||||
// This causes instability due to the number of open file descriptors / sockets in some tests
|
||||
// Windows has higher limits
|
||||
if (process.platform !== "win32") {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return Math.min(Math.floor((cpus().length - 2) / 2), 2);
|
||||
}
|
||||
const windows = process.platform === "win32";
|
||||
const nativeMemory = totalmem();
|
||||
const force_ram_size_input = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
|
||||
let force_ram_size = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
|
||||
if (!(Number.isSafeInteger(force_ram_size_input) && force_ram_size_input > 0)) {
|
||||
force_ram_size = force_ram_size_input + "";
|
||||
}
|
||||
function uncygwinTempDir() {
|
||||
if (process.platform === "win32") {
|
||||
for (let key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP"]) {
|
||||
let TMPDIR = process.env[key] || "";
|
||||
if (!/^\/[a-zA-Z]\//.test(TMPDIR)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const driveLetter = TMPDIR[1];
|
||||
TMPDIR = path.win32.normalize(`${driveLetter.toUpperCase()}:` + TMPDIR.substring(2));
|
||||
process.env[key] = TMPDIR;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
uncygwinTempDir();
|
||||
|
||||
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
|
||||
process.chdir(cwd);
|
||||
|
||||
const ci = !!process.env["GITHUB_ACTIONS"];
|
||||
const enableProgressBar = false;
|
||||
|
||||
const dirPrefix = "bun-test-tmp-" + ((Math.random() * 100_000_0) | 0).toString(36) + "_";
|
||||
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
|
||||
const queue = new PQueue({ concurrency: run_concurrency });
|
||||
|
||||
var prevTmpdir = "";
|
||||
function maketemp() {
|
||||
prevTmpdir = join(
|
||||
tmpdir(),
|
||||
dirPrefix + (Date.now() | 0).toString() + "_" + ((Math.random() * 100_000_0) | 0).toString(36),
|
||||
);
|
||||
mkdirSync(prevTmpdir, { recursive: true });
|
||||
return prevTmpdir;
|
||||
}
|
||||
|
||||
const extensions = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs", ".mts", ".cts", ".mjsx", ".cjsx", ".mtsx", ".ctsx"];
|
||||
|
||||
const git_sha =
|
||||
process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim();
|
||||
|
||||
const TEST_FILTER = process.env.BUN_TEST_FILTER;
|
||||
|
||||
function isTest(path) {
|
||||
if (!basename(path).includes(".test.") || !extensions.some(ext => path.endsWith(ext))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (TEST_FILTER) {
|
||||
if (!path.includes(TEST_FILTER)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function* findTests(dir, query) {
|
||||
for (const entry of readdirSync(resolve(dir), { encoding: "utf-8", withFileTypes: true })) {
|
||||
const path = resolve(dir, entry.name);
|
||||
if (entry.isDirectory() && entry.name !== "node_modules" && entry.name !== ".git") {
|
||||
yield* findTests(path, query);
|
||||
} else if (isTest(path)) {
|
||||
yield path;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let bunExe = "bun";
|
||||
|
||||
if (process.argv.length > 2) {
|
||||
bunExe = resolve(process.argv.at(-1));
|
||||
} else if (process.env.BUN_PATH) {
|
||||
const { BUN_PATH_BASE, BUN_PATH } = process.env;
|
||||
bunExe = resolve(normalize(BUN_PATH_BASE), normalize(BUN_PATH));
|
||||
}
|
||||
|
||||
const { error, stdout: revision_stdout } = spawnSync(bunExe, ["--revision"], {
|
||||
env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
|
||||
});
|
||||
if (error) {
|
||||
if (error.code !== "ENOENT") throw error;
|
||||
console.error(`\x1b[31merror\x1b[0;2m:\x1b[0m Could not find Bun executable at '${bunExe}'`);
|
||||
process.exit(1);
|
||||
}
|
||||
const revision = revision_stdout.toString().trim();
|
||||
|
||||
const { error: error2, stdout: argv0_stdout } = spawnSync(bunExe, ["-e", "console.log(process.argv[0])"], {
|
||||
env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
|
||||
});
|
||||
if (error2) throw error2;
|
||||
const argv0 = argv0_stdout.toString().trim();
|
||||
|
||||
console.log(`Testing ${argv0} v${revision}`);
|
||||
|
||||
const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.26100.0\\shared\\ntstatus.h";
|
||||
let ntstatus_header_cache = null;
|
||||
function lookupWindowsError(code) {
|
||||
if (ntstatus_header_cache === null) {
|
||||
try {
|
||||
ntstatus_header_cache = readFileSync(ntStatusPath, "utf-8");
|
||||
} catch {
|
||||
console.error(`could not find ntstatus.h to lookup error code: ${ntStatusPath}`);
|
||||
ntstatus_header_cache = "";
|
||||
}
|
||||
}
|
||||
const match = ntstatus_header_cache.match(new RegExp(`(STATUS_\\w+).*0x${code.toString(16)}`, "i"));
|
||||
if (match) {
|
||||
return match[1];
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
const failing_tests = [];
|
||||
const passing_tests = [];
|
||||
let maxFd = -1;
|
||||
function getMaxFileDescriptor(path) {
|
||||
if (process.platform === "win32") {
|
||||
return -1;
|
||||
}
|
||||
|
||||
hasInitialMaxFD = true;
|
||||
|
||||
if (process.platform === "linux" || process.platform === "darwin") {
|
||||
try {
|
||||
readdirSync(process.platform === "darwin" ? "/dev/fd" : "/proc/self/fd").forEach(name => {
|
||||
const fd = parseInt(name.trim(), 10);
|
||||
if (Number.isSafeInteger(fd) && fd >= 0) {
|
||||
maxFd = Math.max(maxFd, fd);
|
||||
}
|
||||
});
|
||||
|
||||
return maxFd;
|
||||
} catch {}
|
||||
}
|
||||
|
||||
const devnullfd = openSync("/dev/null", "r");
|
||||
closeSync(devnullfd);
|
||||
maxFd = devnullfd + 1;
|
||||
return maxFd;
|
||||
}
|
||||
let hasInitialMaxFD = false;
|
||||
|
||||
const activeTests = new Map();
|
||||
|
||||
let slowTestCount = 0;
|
||||
function checkSlowTests() {
|
||||
const now = Date.now();
|
||||
const prevSlowTestCount = slowTestCount;
|
||||
slowTestCount = 0;
|
||||
for (const [path, { start, proc }] of activeTests) {
|
||||
if (proc && now - start >= TIMEOUT_DURATION) {
|
||||
console.error(
|
||||
`\x1b[31merror\x1b[0;2m:\x1b[0m Killing test ${JSON.stringify(path)} after ${Math.ceil((now - start) / 1000)}s`,
|
||||
);
|
||||
proc?.stdout?.destroy?.();
|
||||
proc?.stderr?.destroy?.();
|
||||
proc?.kill?.(9);
|
||||
} else if (now - start > SHORT_TIMEOUT_DURATION) {
|
||||
console.error(
|
||||
`\x1b[33mwarning\x1b[0;2m:\x1b[0m Test ${JSON.stringify(path)} has been running for ${Math.ceil(
|
||||
(now - start) / 1000,
|
||||
)}s`,
|
||||
);
|
||||
slowTestCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (slowTestCount > prevSlowTestCount && queue.concurrency > 1) {
|
||||
queue.concurrency += 1;
|
||||
}
|
||||
}
|
||||
|
||||
setInterval(checkSlowTests, SHORT_TIMEOUT_DURATION).unref();
|
||||
var currentTestNumber = 0;
|
||||
async function runTest(path) {
|
||||
const pathOnDisk = resolve(path);
|
||||
const thisTestNumber = currentTestNumber++;
|
||||
const testFileName = posix.normalize(relative(cwd, path).replaceAll("\\", "/"));
|
||||
let exitCode, signal, err, output;
|
||||
|
||||
const start = Date.now();
|
||||
|
||||
const activeTestObject = { start, proc: undefined };
|
||||
activeTests.set(testFileName, activeTestObject);
|
||||
|
||||
try {
|
||||
await new Promise((finish, reject) => {
|
||||
const chunks = [];
|
||||
process.stderr.write(
|
||||
`
|
||||
at ${((start - run_start.getTime()) / 1000).toFixed(2)}s, file ${thisTestNumber
|
||||
.toString()
|
||||
.padStart(total.toString().length, "0")}/${total}, ${failing_tests.length} failing files
|
||||
Starting "${testFileName}"
|
||||
|
||||
`,
|
||||
);
|
||||
const TMPDIR = maketemp();
|
||||
const proc = spawn(bunExe, ["test", pathOnDisk], {
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
FORCE_COLOR: "1",
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
|
||||
BUN_JSC_forceRAMSize: force_ram_size,
|
||||
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
|
||||
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
BUN_INSTALL_CACHE_DIR: join(TMPDIR, ".bun-install-cache"),
|
||||
BUN_ENABLE_CRASH_REPORTING: "0",
|
||||
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
|
||||
},
|
||||
});
|
||||
activeTestObject.proc = proc;
|
||||
proc.stdout.once("end", () => {
|
||||
done();
|
||||
});
|
||||
|
||||
let doneCalls = 0;
|
||||
var done = () => {
|
||||
// TODO: wait for stderr as well
|
||||
// spawn.test currently causes it to hang
|
||||
if (doneCalls++ === 1) {
|
||||
actuallyDone();
|
||||
}
|
||||
};
|
||||
var actuallyDone = function () {
|
||||
actuallyDone = done = () => {};
|
||||
proc?.stderr?.unref?.();
|
||||
proc?.stdout?.unref?.();
|
||||
proc?.unref?.();
|
||||
output = Buffer.concat(chunks).toString();
|
||||
finish();
|
||||
};
|
||||
|
||||
// if (!KEEP_TMPDIR)
|
||||
// proc.once("close", () => {
|
||||
// rm(TMPDIR, { recursive: true, force: true }).catch(() => {});
|
||||
// });
|
||||
|
||||
proc.stdout.on("data", chunk => {
|
||||
chunks.push(chunk);
|
||||
if (run_concurrency === 1) process.stdout.write(chunk);
|
||||
});
|
||||
proc.stderr.on("data", chunk => {
|
||||
chunks.push(chunk);
|
||||
if (run_concurrency === 1) process.stderr.write(chunk);
|
||||
});
|
||||
|
||||
proc.once("close", () => {
|
||||
activeTestObject.proc = undefined;
|
||||
});
|
||||
|
||||
proc.once("exit", (code_, signal_) => {
|
||||
activeTestObject.proc = undefined;
|
||||
exitCode = code_;
|
||||
signal = signal_;
|
||||
if (signal || exitCode !== 0) {
|
||||
actuallyDone();
|
||||
} else {
|
||||
done();
|
||||
}
|
||||
});
|
||||
proc.once("error", err_ => {
|
||||
activeTestObject.proc = undefined;
|
||||
err = err_;
|
||||
actuallyDone();
|
||||
});
|
||||
});
|
||||
} finally {
|
||||
activeTests.delete(testFileName);
|
||||
}
|
||||
|
||||
if (!hasInitialMaxFD) {
|
||||
getMaxFileDescriptor();
|
||||
} else if (maxFd > 0) {
|
||||
const prevMaxFd = maxFd;
|
||||
maxFd = getMaxFileDescriptor();
|
||||
if (maxFd > prevMaxFd + queue.concurrency * 2) {
|
||||
process.stderr.write(
|
||||
`\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${testFileName}, delta: ${
|
||||
maxFd - prevMaxFd
|
||||
}, current: ${maxFd}, previous: ${prevMaxFd}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const passed = exitCode === 0 && !err && !signal;
|
||||
|
||||
let reason = "";
|
||||
if (!passed) {
|
||||
let match;
|
||||
if (err && err.message.includes("timed")) {
|
||||
reason = "hang";
|
||||
} else if ((match = output && output.match(/thread \d+ panic: (.*)\n/))) {
|
||||
reason = 'panic "' + match[1] + '"';
|
||||
} else if (err) {
|
||||
reason = (err.name || "Error") + ": " + err.message;
|
||||
} else if (signal) {
|
||||
reason = signal;
|
||||
} else if (exitCode === 1) {
|
||||
const failMatch = output.match(/\x1b\[31m\s(\d+) fail/);
|
||||
if (failMatch) {
|
||||
reason = failMatch[1] + " failing";
|
||||
} else {
|
||||
reason = "code 1";
|
||||
}
|
||||
} else {
|
||||
const x = windows && lookupWindowsError(exitCode);
|
||||
if (x) {
|
||||
if (x === "STATUS_BREAKPOINT") {
|
||||
if (output.includes("Segmentation fault at address")) {
|
||||
reason = "STATUS_ACCESS_VIOLATION";
|
||||
}
|
||||
}
|
||||
reason = x;
|
||||
} else {
|
||||
reason = "code " + exitCode;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const duration = (Date.now() - start) / 1000;
|
||||
|
||||
if (run_concurrency !== 1 && enableProgressBar) {
|
||||
// clear line
|
||||
process.stdout.write("\x1b[2K\r");
|
||||
}
|
||||
|
||||
console.log(
|
||||
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
|
||||
passed ? "\x1b[32m✔" : "\x1b[31m✖"
|
||||
} ${testFileName}\x1b[0m${reason ? ` (${reason})` : ""}`,
|
||||
);
|
||||
|
||||
finished++;
|
||||
|
||||
if (run_concurrency !== 1 && enableProgressBar) {
|
||||
writeProgressBar();
|
||||
}
|
||||
|
||||
if (run_concurrency > 1 && ci) {
|
||||
process.stderr.write(output);
|
||||
}
|
||||
|
||||
if (!passed) {
|
||||
failing_tests.push({ path: testFileName, reason, output });
|
||||
process.exitCode = 1;
|
||||
if (err) console.error(err);
|
||||
} else {
|
||||
passing_tests.push(testFileName);
|
||||
}
|
||||
|
||||
return passed;
|
||||
}
|
||||
|
||||
var finished = 0;
|
||||
|
||||
function writeProgressBar() {
|
||||
const barWidth = Math.min(process.stdout.columns || 40, 80) - 2;
|
||||
const percent = (finished / total) * 100;
|
||||
const bar = "=".repeat(Math.floor(percent / 2));
|
||||
const str1 = `[${finished}/${total}] [${bar}`;
|
||||
process.stdout.write(`\r${str1}${" ".repeat(barWidth - str1.length)}]`);
|
||||
}
|
||||
|
||||
const allTests = [...findTests(resolve(cwd, "test"))];
|
||||
console.log(`Starting ${allTests.length} tests with ${run_concurrency} concurrency...`);
|
||||
let total = allTests.length;
|
||||
for (const path of allTests) {
|
||||
queue.add(
|
||||
async () =>
|
||||
await runTest(path).catch(e => {
|
||||
console.error("Bug in bun-internal-test");
|
||||
console.error(e);
|
||||
process.exit(1);
|
||||
}),
|
||||
);
|
||||
}
|
||||
await queue.onIdle();
|
||||
console.log(`
|
||||
Completed ${total} tests with ${failing_tests.length} failing tests
|
||||
`);
|
||||
console.log("\n");
|
||||
|
||||
function linkToGH(linkTo) {
|
||||
return `https://github.com/oven-sh/bun/blob/${git_sha}/${linkTo}`;
|
||||
}
|
||||
|
||||
failing_tests.sort((a, b) => a.path.localeCompare(b.path));
|
||||
passing_tests.sort((a, b) => a.localeCompare(b));
|
||||
|
||||
const failingTestDisplay = failing_tests.map(({ path, reason }) => `- \`${path}\` ${reason}`).join("\n");
|
||||
|
||||
// const passingTestDisplay = passing_tests.map(path => `- \`${path}\``).join("\n");
|
||||
|
||||
rmSync("report.md", { force: true });
|
||||
|
||||
const uptime = process.uptime();
|
||||
|
||||
function formatTime(seconds) {
|
||||
if (seconds < 60) {
|
||||
return seconds.toFixed(1) + "s";
|
||||
} else if (seconds < 60 * 60) {
|
||||
return (seconds / 60).toFixed(0) + "m " + formatTime(seconds % 60);
|
||||
} else {
|
||||
return (seconds / 60 / 60).toFixed(0) + "h " + formatTime(seconds % (60 * 60));
|
||||
}
|
||||
}
|
||||
|
||||
const header = `
|
||||
host: ${process.env["GITHUB_RUN_ID"] ? "GitHub Actions: " : ""}${userInfo().username}@${hostname()}
|
||||
platform: ${process.platform} ${process.arch}
|
||||
bun: ${argv0}
|
||||
version: v${revision}
|
||||
|
||||
date: ${run_start.toISOString()}
|
||||
duration: ${formatTime(uptime)}
|
||||
|
||||
total: ${total} files
|
||||
failing: ${failing_tests.length} files
|
||||
passing: ${passing_tests.length} files
|
||||
|
||||
percent: ${((passing_tests.length / total) * 100).toFixed(2)}%
|
||||
`.trim();
|
||||
|
||||
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
|
||||
console.log(header);
|
||||
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
|
||||
|
||||
let report = `# bun test on ${
|
||||
process.env["GITHUB_REF"] ??
|
||||
spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim()
|
||||
}
|
||||
|
||||
\`\`\`
|
||||
${header}
|
||||
\`\`\`
|
||||
|
||||
`;
|
||||
|
||||
if (failingTestDisplay.length > 0) {
|
||||
report += `## Failing tests\n\n`;
|
||||
report += failingTestDisplay;
|
||||
report += "\n\n";
|
||||
}
|
||||
|
||||
// if(passingTestDisplay.length > 0) {
|
||||
// report += `## Passing tests\n\n`;
|
||||
// report += passingTestDisplay;
|
||||
// report += "\n\n";
|
||||
// }
|
||||
|
||||
if (failing_tests.length) {
|
||||
report += `## Failing tests log output\n\n`;
|
||||
for (const { path, output, reason } of failing_tests) {
|
||||
report += `### ${path}\n\n`;
|
||||
report += "[Link to file](" + linkToGH(path) + ")\n\n";
|
||||
report += `${reason}\n\n`;
|
||||
report += "```\n";
|
||||
|
||||
let failing_output = output
|
||||
.replace(/\x1b\[[0-9;]*m/g, "")
|
||||
.replace(/^::(group|endgroup|error|warning|set-output|add-matcher|remove-matcher).*$/gm, "");
|
||||
|
||||
if (failing_output.length > 1024 * 64) {
|
||||
failing_output = failing_output.slice(0, 1024 * 64) + `\n\n[truncated output (length: ${failing_output.length})]`;
|
||||
}
|
||||
|
||||
report += failing_output;
|
||||
|
||||
report += "```\n\n";
|
||||
}
|
||||
}
|
||||
|
||||
writeFileSync("test-report.md", report);
|
||||
writeFileSync(
|
||||
"test-report.json",
|
||||
JSON.stringify({
|
||||
failing_tests,
|
||||
passing_tests,
|
||||
}),
|
||||
);
|
||||
|
||||
function mabeCapitalize(str) {
|
||||
str = str.toLowerCase();
|
||||
if (str.includes("arm64") || str.includes("aarch64")) {
|
||||
return str.toUpperCase();
|
||||
}
|
||||
|
||||
if (str.includes("x64")) {
|
||||
return "x64";
|
||||
}
|
||||
|
||||
if (str.includes("baseline")) {
|
||||
return str;
|
||||
}
|
||||
|
||||
return str[0].toUpperCase() + str.slice(1);
|
||||
}
|
||||
|
||||
console.log("-> test-report.md, test-report.json");
|
||||
function linkify(text, url) {
|
||||
if (url?.startsWith?.("https://")) {
|
||||
return `[${text}](${url})`;
|
||||
}
|
||||
|
||||
return text;
|
||||
}
|
||||
|
||||
if (ci) {
|
||||
if (failing_tests.length > 0) {
|
||||
action.setFailed(`${failing_tests.length} files with failing tests`);
|
||||
}
|
||||
action.setOutput("failing_tests", failingTestDisplay);
|
||||
action.setOutput("failing_tests_count", failing_tests.length);
|
||||
if (failing_tests.length) {
|
||||
const { env } = process;
|
||||
const tag = process.env.BUN_TAG || "unknown";
|
||||
const url = `${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}/actions/runs/${env.GITHUB_RUN_ID}`;
|
||||
|
||||
let comment = `## ${linkify(`${emojiTag(tag)}${failing_tests.length} failing tests`, url)} ${tag
|
||||
.split("-")
|
||||
.map(mabeCapitalize)
|
||||
.join(" ")}
|
||||
|
||||
${failingTestDisplay}
|
||||
|
||||
`;
|
||||
writeFileSync("comment.md", comment);
|
||||
}
|
||||
let truncated_report = report;
|
||||
if (truncated_report.length > 512 * 1000) {
|
||||
truncated_report = truncated_report.slice(0, 512 * 1000) + "\n\n...truncated...";
|
||||
}
|
||||
action.summary.addRaw(truncated_report);
|
||||
await action.summary.write();
|
||||
}
|
||||
|
||||
function emojiTag(tag) {
|
||||
let emojiText = "";
|
||||
tag = tag.toLowerCase();
|
||||
if (tag.includes("win32") || tag.includes("windows")) {
|
||||
emojiText += "🪟";
|
||||
}
|
||||
|
||||
if (tag.includes("linux")) {
|
||||
emojiText += "🐧";
|
||||
}
|
||||
|
||||
if (tag.includes("macos") || tag.includes("darwin")) {
|
||||
emojiText += "";
|
||||
}
|
||||
|
||||
if (tag.includes("x86") || tag.includes("x64") || tag.includes("_64") || tag.includes("amd64")) {
|
||||
if (!tag.includes("linux")) {
|
||||
emojiText += "💻";
|
||||
} else {
|
||||
emojiText += "🖥";
|
||||
}
|
||||
}
|
||||
|
||||
if (tag.includes("arm64") || tag.includes("aarch64")) {
|
||||
emojiText += "💪";
|
||||
}
|
||||
|
||||
if (emojiText) {
|
||||
emojiText += " ";
|
||||
}
|
||||
|
||||
return emojiText;
|
||||
}
|
||||
|
||||
process.exit(failing_tests.length ? 1 : process.exitCode);
|
||||
25
packages/bun-internal-test/tsconfig.json
Normal file
25
packages/bun-internal-test/tsconfig.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"noEmit": true,
|
||||
"lib": ["ESNext"],
|
||||
"module": "ESNext",
|
||||
"target": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
"jsx": "preserve",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"allowJs": true,
|
||||
"types": ["bun-types"],
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"packages": ["resources/packages.json"],
|
||||
"qunit": ["runners/qunit/qunit.ts"],
|
||||
"bun-test": ["types/bun-test.d.ts"],
|
||||
"runner": ["runners/bun/runner.ts"],
|
||||
"html": ["scripts/html.ts"]
|
||||
}
|
||||
}
|
||||
}
|
||||
20
packages/bun-internal-test/types/bun-test.d.ts
vendored
Normal file
20
packages/bun-internal-test/types/bun-test.d.ts
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import type { afterAll, afterEach, beforeAll, beforeEach, describe, Expect, test } from "bun:test";
|
||||
|
||||
export type BunExpect = (value: unknown) => Expect;
|
||||
export type BunDescribe = typeof describe;
|
||||
export type BunTest = typeof test;
|
||||
export type BunHook = typeof beforeAll | typeof beforeEach | typeof afterAll | typeof afterEach;
|
||||
|
||||
export type TestContext = {
|
||||
expect: BunExpect;
|
||||
describe: BunDescribe;
|
||||
test: BunTest;
|
||||
beforeAll: BunHook;
|
||||
beforeEach: BunHook;
|
||||
afterAll: BunHook;
|
||||
afterEach: BunHook;
|
||||
};
|
||||
|
||||
declare module "bun" {
|
||||
function jest(path: string): TestContext;
|
||||
}
|
||||
13
packages/bun-types/bun.d.ts
vendored
13
packages/bun-types/bun.d.ts
vendored
@@ -2512,19 +2512,6 @@ declare module "bun" {
|
||||
* This defaults to `true`.
|
||||
*/
|
||||
throw?: boolean;
|
||||
|
||||
/**
|
||||
* Custom tsconfig.json file path to use for path resolution.
|
||||
* Equivalent to `--tsconfig-override` in the CLI.
|
||||
* @example
|
||||
* ```ts
|
||||
* await Bun.build({
|
||||
* entrypoints: ['./src/index.ts'],
|
||||
* tsconfig: './custom-tsconfig.json'
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
tsconfig?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -27,17 +27,11 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
|
||||
- Run scripts from package.json
|
||||
- Visual lockfile viewer for old binary lockfiles (`bun.lockb`)
|
||||
|
||||
## Bun test runner integration
|
||||
|
||||
Run and debug tests directly from VSCode's Testing panel. The extension automatically discovers test files, shows inline test status, and provides rich error messages with diffs.
|
||||
|
||||

|
||||
|
||||
## In-editor error messages
|
||||
|
||||
When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.
|
||||
|
||||

|
||||

|
||||
|
||||
<div align="center">
|
||||
<sup>In the example above VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>
|
||||
@@ -101,9 +95,6 @@ You can use the following configurations to debug JavaScript and TypeScript file
|
||||
// The URL of the WebSocket inspector to attach to.
|
||||
// This value can be retrieved by using `bun --inspect`.
|
||||
"url": "ws://localhost:6499/",
|
||||
// Optional path mapping for remote debugging
|
||||
"localRoot": "${workspaceFolder}",
|
||||
"remoteRoot": "/app",
|
||||
},
|
||||
],
|
||||
}
|
||||
@@ -125,9 +116,6 @@ You can use the following configurations to customize the behavior of the Bun ex
|
||||
"bun.debugTerminal.stopOnEntry": false,
|
||||
|
||||
// Glob pattern to find test files. Defaults to the value shown below.
|
||||
"bun.test.filePattern": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts,cjs,mjs}",
|
||||
|
||||
// The custom script to call for testing instead of `bun test`
|
||||
"bun.test.customScript": "bun test",
|
||||
"bun.test.filePattern": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
|
||||
}
|
||||
```
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 6.7 MiB |
|
Before Width: | Height: | Size: 462 KiB After Width: | Height: | Size: 462 KiB |
@@ -1,70 +0,0 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
|
||||
// Simple passing test
|
||||
test("adds numbers correctly", () => {
|
||||
expect(1 + 2).toBe(3);
|
||||
});
|
||||
|
||||
// Simple failing test
|
||||
test("subtracts numbers incorrectly", () => {
|
||||
expect(5 - 2).toBe(10); // This will fail
|
||||
});
|
||||
|
||||
describe("isEmail", () => {
|
||||
test("valid emails", () => {
|
||||
expect(isEmail("test@example.com")).toBe(true);
|
||||
expect(isEmail("foo.bar@domain.co")).toBe(true);
|
||||
});
|
||||
|
||||
test("invalid emails", () => {
|
||||
expect(isEmail("not-an-email")).toBe(false);
|
||||
expect(isEmail("missing@at")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// Nested describe
|
||||
describe("Array utilities", () => {
|
||||
function sum(arr: number[]): number {
|
||||
return arr.reduce((a, b) => a + b, 0);
|
||||
}
|
||||
// describe()
|
||||
describe("sum()", () => {
|
||||
test(
|
||||
"sums positive numbers",
|
||||
async () => {
|
||||
await Bun.sleep(10000);
|
||||
expect(sum([1, 2, 3])).toBe(7);
|
||||
},
|
||||
{ timeout: 10 },
|
||||
); // Custom timeout
|
||||
|
||||
test.skip("sums negative numbers", () => {
|
||||
expect(sum([-1, -2, -3])).toBe(-6);
|
||||
});
|
||||
|
||||
test("empty array returns 0", () => {
|
||||
expect(sum([])).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// test.each example
|
||||
describe("multiply", () => {
|
||||
function multiply(a: number, b: number) {
|
||||
return a * b;
|
||||
}
|
||||
|
||||
test.each([
|
||||
[2, 3, 6],
|
||||
[0, 5, 0],
|
||||
[-1, 8, -8],
|
||||
[7, -2, -14],
|
||||
[2, 2, 5],
|
||||
])("multiply(%i, %i) === %i", (a, b, expected) => {
|
||||
expect(multiply(a, b)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
function isEmail(str: string): boolean {
|
||||
return /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(str);
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "bun-vscode",
|
||||
"version": "0.0.29",
|
||||
"version": "0.0.25",
|
||||
"author": "oven",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -71,7 +71,7 @@
|
||||
},
|
||||
"bun.test.filePattern": {
|
||||
"type": "string",
|
||||
"default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts,cjs,mjs}",
|
||||
"default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
|
||||
"description": "Glob pattern to find test files"
|
||||
},
|
||||
"bun.test.customFlag": {
|
||||
@@ -81,14 +81,8 @@
|
||||
},
|
||||
"bun.test.customScript": {
|
||||
"type": "string",
|
||||
"default": "bun test",
|
||||
"default": "",
|
||||
"description": "Custom script to use instead of `bun test`, for example script from `package.json`"
|
||||
},
|
||||
"bun.test.enable": {
|
||||
"type": "boolean",
|
||||
"description": "If the test explorer should be enabled and integrated with your editor",
|
||||
"scope": "window",
|
||||
"default": true
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -283,14 +277,6 @@
|
||||
"type": "boolean",
|
||||
"description": "If the debugger should stop on the first line of the program.",
|
||||
"default": false
|
||||
},
|
||||
"localRoot": {
|
||||
"type": "string",
|
||||
"description": "The local path that maps to \"remoteRoot\" when attaching to a remote Bun process."
|
||||
},
|
||||
"remoteRoot": {
|
||||
"type": "string",
|
||||
"description": "The remote path to the code when attaching. File paths reported by Bun that start with this path will be mapped back to 'localRoot'."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import { registerDiagnosticsSocket } from "./features/diagnostics/diagnostics";
|
||||
import { registerBunlockEditor } from "./features/lockfile";
|
||||
import { registerPackageJsonProviders } from "./features/tasks/package.json";
|
||||
import { registerTaskProvider } from "./features/tasks/tasks";
|
||||
import { registerTests } from "./features/tests";
|
||||
import { registerTestCodeLens, registerTestRunner } from "./features/tests";
|
||||
|
||||
async function runUnsavedCode() {
|
||||
const editor = vscode.window.activeTextEditor;
|
||||
@@ -47,7 +47,8 @@ export function activate(context: vscode.ExtensionContext) {
|
||||
registerTaskProvider(context);
|
||||
registerPackageJsonProviders(context);
|
||||
registerDiagnosticsSocket(context);
|
||||
registerTests(context);
|
||||
registerTestRunner(context);
|
||||
registerTestCodeLens(context);
|
||||
|
||||
// Only register for text editors
|
||||
context.subscriptions.push(vscode.commands.registerTextEditorCommand("extension.bun.runUnsavedCode", runUnsavedCode));
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { DebugSession, OutputEvent } from "@vscode/debugadapter";
|
||||
import { tmpdir } from "node:os";
|
||||
import * as path from "node:path";
|
||||
import { join } from "node:path";
|
||||
import * as vscode from "vscode";
|
||||
import {
|
||||
@@ -221,7 +220,7 @@ class InlineDebugAdapterFactory implements vscode.DebugAdapterDescriptorFactory
|
||||
session: vscode.DebugSession,
|
||||
): Promise<vscode.ProviderResult<vscode.DebugAdapterDescriptor>> {
|
||||
const { configuration } = session;
|
||||
const { request, url, __untitledName, localRoot, remoteRoot } = configuration;
|
||||
const { request, url, __untitledName } = configuration;
|
||||
|
||||
if (request === "attach") {
|
||||
for (const [adapterUrl, adapter] of adapters) {
|
||||
@@ -231,10 +230,7 @@ class InlineDebugAdapterFactory implements vscode.DebugAdapterDescriptorFactory
|
||||
}
|
||||
}
|
||||
|
||||
const adapter = new FileDebugSession(session.id, __untitledName, {
|
||||
localRoot,
|
||||
remoteRoot,
|
||||
});
|
||||
const adapter = new FileDebugSession(session.id, __untitledName);
|
||||
await adapter.initialize();
|
||||
return new vscode.DebugAdapterInlineImplementation(adapter);
|
||||
}
|
||||
@@ -279,11 +275,6 @@ interface RuntimeExceptionThrownEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface PathMapping {
|
||||
localRoot?: string;
|
||||
remoteRoot?: string;
|
||||
}
|
||||
|
||||
class FileDebugSession extends DebugSession {
|
||||
// If these classes are moved/published, we should make sure
|
||||
// we remove these non-null assertions so consumers of
|
||||
@@ -292,60 +283,18 @@ class FileDebugSession extends DebugSession {
|
||||
sessionId?: string;
|
||||
untitledDocPath?: string;
|
||||
bunEvalPath?: string;
|
||||
localRoot?: string;
|
||||
remoteRoot?: string;
|
||||
#isWindowsRemote = false;
|
||||
|
||||
constructor(sessionId?: string, untitledDocPath?: string, mapping?: PathMapping) {
|
||||
constructor(sessionId?: string, untitledDocPath?: string) {
|
||||
super();
|
||||
this.sessionId = sessionId;
|
||||
this.untitledDocPath = untitledDocPath;
|
||||
|
||||
if (mapping) {
|
||||
this.localRoot = mapping.localRoot;
|
||||
this.remoteRoot = mapping.remoteRoot;
|
||||
if (typeof mapping.remoteRoot === "string") {
|
||||
this.#isWindowsRemote = mapping.remoteRoot.includes("\\");
|
||||
}
|
||||
}
|
||||
|
||||
if (untitledDocPath) {
|
||||
const cwd = vscode.workspace.workspaceFolders?.[0]?.uri?.fsPath ?? process.cwd();
|
||||
this.bunEvalPath = join(cwd, "[eval]");
|
||||
}
|
||||
}
|
||||
|
||||
mapRemoteToLocal(p: string | undefined): string | undefined {
|
||||
if (!p || !this.remoteRoot || !this.localRoot) return p;
|
||||
const remoteModule = this.#isWindowsRemote ? path.win32 : path.posix;
|
||||
let remoteRoot = remoteModule.normalize(this.remoteRoot);
|
||||
if (!remoteRoot.endsWith(remoteModule.sep)) remoteRoot += remoteModule.sep;
|
||||
let target = remoteModule.normalize(p);
|
||||
const starts = this.#isWindowsRemote
|
||||
? target.toLowerCase().startsWith(remoteRoot.toLowerCase())
|
||||
: target.startsWith(remoteRoot);
|
||||
if (starts) {
|
||||
const rel = target.slice(remoteRoot.length);
|
||||
const localRel = rel.split(remoteModule.sep).join(path.sep);
|
||||
return path.join(this.localRoot, localRel);
|
||||
}
|
||||
return p;
|
||||
}
|
||||
|
||||
mapLocalToRemote(p: string | undefined): string | undefined {
|
||||
if (!p || !this.remoteRoot || !this.localRoot) return p;
|
||||
let localRoot = path.normalize(this.localRoot);
|
||||
if (!localRoot.endsWith(path.sep)) localRoot += path.sep;
|
||||
let localPath = path.normalize(p);
|
||||
if (localPath.startsWith(localRoot)) {
|
||||
const rel = localPath.slice(localRoot.length);
|
||||
const remoteModule = this.#isWindowsRemote ? path.win32 : path.posix;
|
||||
const remoteRel = rel.split(path.sep).join(remoteModule.sep);
|
||||
return remoteModule.join(this.remoteRoot, remoteRel);
|
||||
}
|
||||
return p;
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
const uniqueId = this.sessionId ?? Math.random().toString(36).slice(2);
|
||||
const url =
|
||||
@@ -358,20 +307,14 @@ class FileDebugSession extends DebugSession {
|
||||
|
||||
if (untitledDocPath) {
|
||||
this.adapter.on("Adapter.response", (response: DebugProtocolResponse) => {
|
||||
if (response.body?.source?.path) {
|
||||
if (response.body.source.path === bunEvalPath) {
|
||||
response.body.source.path = untitledDocPath;
|
||||
} else {
|
||||
response.body.source.path = this.mapRemoteToLocal(response.body.source.path);
|
||||
}
|
||||
if (response.body?.source?.path === bunEvalPath) {
|
||||
response.body.source.path = untitledDocPath;
|
||||
}
|
||||
if (Array.isArray(response.body?.breakpoints)) {
|
||||
for (const bp of response.body.breakpoints) {
|
||||
if (bp.source?.path === bunEvalPath) {
|
||||
bp.source.path = untitledDocPath;
|
||||
bp.verified = true;
|
||||
} else if (bp.source?.path) {
|
||||
bp.source.path = this.mapRemoteToLocal(bp.source.path);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -379,35 +322,14 @@ class FileDebugSession extends DebugSession {
|
||||
});
|
||||
|
||||
this.adapter.on("Adapter.event", (event: DebugProtocolEvent) => {
|
||||
if (event.body?.source?.path) {
|
||||
if (event.body.source.path === bunEvalPath) {
|
||||
event.body.source.path = untitledDocPath;
|
||||
} else {
|
||||
event.body.source.path = this.mapRemoteToLocal(event.body.source.path);
|
||||
}
|
||||
if (event.body?.source?.path === bunEvalPath) {
|
||||
event.body.source.path = untitledDocPath;
|
||||
}
|
||||
this.sendEvent(event);
|
||||
});
|
||||
} else {
|
||||
this.adapter.on("Adapter.response", (response: DebugProtocolResponse) => {
|
||||
if (response.body?.source?.path) {
|
||||
response.body.source.path = this.mapRemoteToLocal(response.body.source.path);
|
||||
}
|
||||
if (Array.isArray(response.body?.breakpoints)) {
|
||||
for (const bp of response.body.breakpoints) {
|
||||
if (bp.source?.path) {
|
||||
bp.source.path = this.mapRemoteToLocal(bp.source.path);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.sendResponse(response);
|
||||
});
|
||||
this.adapter.on("Adapter.event", (event: DebugProtocolEvent) => {
|
||||
if (event.body?.source?.path) {
|
||||
event.body.source.path = this.mapRemoteToLocal(event.body.source.path);
|
||||
}
|
||||
this.sendEvent(event);
|
||||
});
|
||||
this.adapter.on("Adapter.response", response => this.sendResponse(response));
|
||||
this.adapter.on("Adapter.event", event => this.sendEvent(event));
|
||||
}
|
||||
|
||||
this.adapter.on("Adapter.reverseRequest", ({ command, arguments: args }) =>
|
||||
@@ -423,15 +345,11 @@ class FileDebugSession extends DebugSession {
|
||||
if (type === "request") {
|
||||
const { untitledDocPath, bunEvalPath } = this;
|
||||
const { command } = message;
|
||||
if (command === "setBreakpoints" || command === "breakpointLocations") {
|
||||
if (untitledDocPath && (command === "setBreakpoints" || command === "breakpointLocations")) {
|
||||
const args = message.arguments as any;
|
||||
if (untitledDocPath && args.source?.path === untitledDocPath) {
|
||||
if (args.source?.path === untitledDocPath) {
|
||||
args.source.path = bunEvalPath;
|
||||
} else if (args.source?.path) {
|
||||
args.source.path = this.mapLocalToRemote(args.source.path);
|
||||
}
|
||||
} else if (command === "source" && message.arguments?.source?.path) {
|
||||
message.arguments.source.path = this.mapLocalToRemote(message.arguments.source.path);
|
||||
}
|
||||
|
||||
this.adapter.emit("Adapter.request", message);
|
||||
@@ -449,7 +367,7 @@ class TerminalDebugSession extends FileDebugSession {
|
||||
signal!: TCPSocketSignal | UnixSignal;
|
||||
|
||||
constructor() {
|
||||
super(undefined, undefined);
|
||||
super();
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
|
||||
@@ -1,864 +0,0 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { MockTestController, MockWorkspaceFolder } from "./vscode-types.mock";
|
||||
import "./vscode.mock";
|
||||
import { makeTestController, makeWorkspaceFolder } from "./vscode.mock";
|
||||
|
||||
const { BunTestController } = await import("../bun-test-controller");
|
||||
|
||||
const mockTestController: MockTestController = makeTestController();
|
||||
const mockWorkspaceFolder: MockWorkspaceFolder = makeWorkspaceFolder("/test/workspace");
|
||||
|
||||
const controller = new BunTestController(mockTestController, mockWorkspaceFolder, true);
|
||||
const internal = controller._internal;
|
||||
|
||||
const { expandEachTests, parseTestBlocks, getBraceDepth } = internal;
|
||||
|
||||
describe("BunTestController (static file parser)", () => {
|
||||
describe("expandEachTests", () => {
|
||||
describe("$variable syntax", () => {
|
||||
test("should not expand $variable patterns (Bun behavior)", () => {
|
||||
const content = `test.each([
|
||||
{ a: 1, b: 2, expected: 3 },
|
||||
{ a: 5, b: 5, expected: 10 }
|
||||
])('$a + $b = $expected', ({ a, b, expected }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$a + $b = $expected", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$a + $b = $expected");
|
||||
});
|
||||
|
||||
test("should not expand string values with quotes", () => {
|
||||
const content = `test.each([
|
||||
{ name: "Alice", city: "NYC" },
|
||||
{ name: "Bob", city: "LA" }
|
||||
])('$name from $city', ({ name, city }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$name from $city", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$name from $city");
|
||||
});
|
||||
|
||||
test("should not expand nested property access", () => {
|
||||
const content = `test.each([
|
||||
{ user: { name: "Alice", profile: { city: "NYC" } } },
|
||||
{ user: { name: "Bob", profile: { city: "LA" } } }
|
||||
])('$user.name from $user.profile.city', ({ user }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$user.name from $user.profile.city", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$user.name from $user.profile.city");
|
||||
});
|
||||
|
||||
test("should not expand array indexing", () => {
|
||||
const content = `test.each([
|
||||
{ users: [{ name: "Alice" }, { name: "Bob" }] },
|
||||
{ users: [{ name: "Carol" }, { name: "Dave" }] }
|
||||
])('first user: $users.0.name', ({ users }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "first user: $users.0.name", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("first user: $users.0.name");
|
||||
});
|
||||
|
||||
test("should return template as-is for missing properties", () => {
|
||||
const content = `test.each([
|
||||
{ a: 1 },
|
||||
{ a: 2 }
|
||||
])('$a and $missing', ({ a }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$a and $missing", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$a and $missing");
|
||||
});
|
||||
|
||||
test("should handle edge cases with special identifiers", () => {
|
||||
const content = `test.each([
|
||||
{ _valid: "ok", $dollar: "yes", _123mix: "mixed" }
|
||||
])('$_valid | $$dollar | $_123mix', (obj) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$_valid | $$dollar | $_123mix", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$_valid | $$dollar | $_123mix");
|
||||
});
|
||||
|
||||
test("should handle invalid identifiers as literals", () => {
|
||||
const content = `test.each([
|
||||
{ valid: "test" }
|
||||
])('$valid | $123invalid | $has-dash', (obj) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$valid | $123invalid | $has-dash", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$valid | $123invalid | $has-dash");
|
||||
});
|
||||
});
|
||||
|
||||
describe("% formatters", () => {
|
||||
test("should handle %i for integers", () => {
|
||||
const content = `test.each([
|
||||
[1, 2, 3],
|
||||
[5, 5, 10]
|
||||
])('%i + %i = %i', (a, b, expected) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "%i + %i = %i", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe("1 + 2 = 3");
|
||||
expect(result[1].name).toBe("5 + 5 = 10");
|
||||
});
|
||||
|
||||
test("should handle %s for strings", () => {
|
||||
const content = `test.each([
|
||||
["hello", "world"],
|
||||
["foo", "bar"]
|
||||
])('%s %s', (a, b) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "%s %s", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe("hello world");
|
||||
expect(result[1].name).toBe("foo bar");
|
||||
});
|
||||
|
||||
test("should handle %f and %d for numbers", () => {
|
||||
const content = `test.each([
|
||||
[1.5, 2.7],
|
||||
[3.14, 2.71]
|
||||
])('%f and %d', (a, b) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "%f and %d", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe("1.5 and 2.7");
|
||||
expect(result[1].name).toBe("3.14 and 2.71");
|
||||
});
|
||||
|
||||
test("should handle %o and %j for objects", () => {
|
||||
const content = `test.each([
|
||||
[{ a: 1 }, { b: 2 }]
|
||||
])('%o and %j', (obj1, obj2) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "%o and %j", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("%o and %j");
|
||||
});
|
||||
|
||||
test("should handle %# for index", () => {
|
||||
const content = `test.each([
|
||||
[1, 2],
|
||||
[3, 4],
|
||||
[5, 6]
|
||||
])('Test #%#: %i + %i', (a, b) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Test #%#: %i + %i", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].name).toBe("Test #1: 1 + 2");
|
||||
expect(result[1].name).toBe("Test #2: 3 + 4");
|
||||
expect(result[2].name).toBe("Test #3: 5 + 6");
|
||||
});
|
||||
|
||||
test("should handle %% for literal percent", () => {
|
||||
const content = `test.each([
|
||||
[50],
|
||||
[100]
|
||||
])('%i%% complete', (percent) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "%i%% complete", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe("50% complete");
|
||||
expect(result[1].name).toBe("100% complete");
|
||||
});
|
||||
});
|
||||
|
||||
describe("describe.each", () => {
|
||||
test("should work with describe.each", () => {
|
||||
const content = `describe.each([
|
||||
{ module: "fs", method: "readFile" },
|
||||
{ module: "path", method: "join" }
|
||||
])('$module module', ({ module, method }) => {})`;
|
||||
|
||||
const result = expandEachTests("describe.each([", "$module module", content, 0, "describe", 1);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$module module");
|
||||
expect(result[0].type).toBe("describe");
|
||||
});
|
||||
});
|
||||
|
||||
describe("error handling", () => {
|
||||
test("should handle non-.each tests", () => {
|
||||
const result = expandEachTests("test", "regular test", "test('regular test', () => {})", 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("regular test");
|
||||
});
|
||||
|
||||
test("should handle malformed JSON", () => {
|
||||
const content = `test.each([
|
||||
{ invalid json }
|
||||
])('test', () => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("test");
|
||||
});
|
||||
|
||||
test("should handle non-array values", () => {
|
||||
const content = `test.each({ not: "array" })('test', () => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("test");
|
||||
});
|
||||
});
|
||||
|
||||
describe("mixed formatters", () => {
|
||||
test("should handle both $ and % in objects", () => {
|
||||
const content = `test.each([
|
||||
{ name: "Test", index: 0 }
|
||||
])('$name #%#', (obj) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$name #%#", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$name #%#");
|
||||
});
|
||||
});
|
||||
|
||||
describe("edge cases", () => {
|
||||
test("should handle complex nested objects", () => {
|
||||
const content = `test.each([
|
||||
{
|
||||
user: {
|
||||
profile: {
|
||||
address: {
|
||||
city: "NYC",
|
||||
coords: { lat: 40.7128, lng: -74.0060 }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
])('User from $user.profile.address.city at $user.profile.address.coords.lat', ({ user }) => {})`;
|
||||
|
||||
const result = expandEachTests(
|
||||
"test.each([",
|
||||
"User from $user.profile.address.city at $user.profile.address.coords.lat",
|
||||
content,
|
||||
0,
|
||||
"test",
|
||||
1,
|
||||
);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("User from $user.profile.address.city at $user.profile.address.coords.lat");
|
||||
});
|
||||
|
||||
test("should handle arrays with inline comments", () => {
|
||||
const content = `test.each([
|
||||
{ a: 1 }, // first test
|
||||
{ a: 2 }, // second test
|
||||
// { a: 3 }, // commented out test
|
||||
{ a: 4 } /* final test */
|
||||
])('test $a', ({ a }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("test $a");
|
||||
});
|
||||
|
||||
test("should handle arrays with multiline comments", () => {
|
||||
const content = `test.each([
|
||||
{ name: "test1" },
|
||||
/* This is a
|
||||
multiline comment
|
||||
that spans several lines */
|
||||
{ name: "test2" },
|
||||
/**
|
||||
* JSDoc style comment
|
||||
* with multiple lines
|
||||
*/
|
||||
{ name: "test3" }
|
||||
])('$name', ({ name }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "$name", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("$name");
|
||||
});
|
||||
|
||||
test("should handle malformed array syntax gracefully", () => {
|
||||
const content = `test.each([
|
||||
{ a: 1 },
|
||||
{ a: 2,,, }, // extra commas
|
||||
{ a: 3, }, // trailing comma
|
||||
{ a: 4 },,, // extra trailing commas
|
||||
])('test $a', ({ a }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
|
||||
|
||||
expect(result.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
test("should handle strings with comment-like content", () => {
|
||||
const content = `test.each([
|
||||
{ comment: "// this is not a comment" },
|
||||
{ comment: "/* neither is this */" },
|
||||
{ url: "https://example.com/path" }
|
||||
])('Test: $comment $url', (data) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Test: $comment $url", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Test: $comment $url");
|
||||
});
|
||||
|
||||
test("should handle special characters in strings", () => {
|
||||
const content = `test.each([
|
||||
{ char: "\\n" },
|
||||
{ char: "\\t" },
|
||||
{ char: "\\"" },
|
||||
{ char: "\\'" },
|
||||
{ char: "\\\\" },
|
||||
{ char: "\`" }
|
||||
])('Special char: $char', ({ char }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Special char: $char", content, 0, "test", 1);
|
||||
|
||||
expect(result.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
test("should handle empty arrays", () => {
|
||||
const content = `test.each([])('should handle empty', () => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "should handle empty", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("should handle undefined and null values", () => {
|
||||
const content = `test.each([
|
||||
{ value: undefined },
|
||||
{ value: null },
|
||||
{ value: false },
|
||||
{ value: 0 },
|
||||
{ value: "" }
|
||||
])('Value: $value', ({ value }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Value: $value", content, 0, "test", 1);
|
||||
|
||||
if (result.length === 1) {
|
||||
expect(result[0].name).toBe("Value: $value");
|
||||
} else {
|
||||
expect(result).toHaveLength(5);
|
||||
expect(result[0].name).toBe("Value: undefined");
|
||||
expect(result[1].name).toBe("Value: null");
|
||||
expect(result[2].name).toBe("Value: false");
|
||||
expect(result[3].name).toBe("Value: 0");
|
||||
expect(result[4].name).toBe("Value: ");
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle circular references gracefully", () => {
|
||||
const content = `test.each([
|
||||
{ a: { b: "[Circular]" } },
|
||||
{ a: { b: { c: "[Circular]" } } }
|
||||
])('Circular: $a.b', ({ a }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Circular: $a.b", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Circular: $a.b");
|
||||
});
|
||||
|
||||
test("should handle very long property paths", () => {
|
||||
const content = `test.each([
|
||||
{
|
||||
a: {
|
||||
b: {
|
||||
c: {
|
||||
d: {
|
||||
e: {
|
||||
f: {
|
||||
g: "deeply nested"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
])('Value: $a.b.c.d.e.f.g', (data) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Value: $a.b.c.d.e.f.g", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Value: $a.b.c.d.e.f.g");
|
||||
});
|
||||
|
||||
test("should handle syntax errors in array", () => {
|
||||
const content = `test.each([
|
||||
{ a: 1 }
|
||||
{ a: 2 } // missing comma
|
||||
{ a: 3 }
|
||||
])('test $a', ({ a }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("test $a");
|
||||
});
|
||||
|
||||
test("should handle arrays with trailing commas", () => {
|
||||
const content = `test.each([
|
||||
{ a: 1 },
|
||||
{ a: 2 },
|
||||
])('test $a', ({ a }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("test $a");
|
||||
});
|
||||
|
||||
test("should handle mixed data types in arrays", () => {
|
||||
const content = `test.each([
|
||||
["string", 123, true, null, undefined],
|
||||
[{ obj: true }, [1, 2, 3], new Date("2024-01-01")]
|
||||
])('test %s %i %s %s %s', (...args) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "test %s %i %s %s %s", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("test %s %i %s %s %s");
|
||||
});
|
||||
|
||||
test("should handle regex-like strings", () => {
|
||||
const content = `test.each([
|
||||
{ pattern: "/^test.*$/" },
|
||||
{ pattern: "\\\\d{3}-\\\\d{4}" },
|
||||
{ pattern: "[a-zA-Z]+" }
|
||||
])('Pattern: $pattern', ({ pattern }) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Pattern: $pattern", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Pattern: $pattern");
|
||||
});
|
||||
|
||||
test("should handle invalid property access gracefully", () => {
|
||||
const content = `test.each([
|
||||
{ a: { b: null } },
|
||||
{ a: null },
|
||||
{ },
|
||||
{ a: { } }
|
||||
])('Access: $a.b.c.d', (data) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Access: $a.b.c.d", content, 0, "test", 1);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Access: $a.b.c.d");
|
||||
});
|
||||
|
||||
test("should handle object methods and computed properties", () => {
|
||||
const content = `test.each([
|
||||
{ fn: function() {}, method() {}, arrow: () => {} },
|
||||
{ ["computed"]: "value", [Symbol.for("sym")]: "symbol" }
|
||||
])('Object with methods', (obj) => {})`;
|
||||
|
||||
const result = expandEachTests("test.each([", "Object with methods", content, 0, "test", 1);
|
||||
|
||||
expect(result.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseTestBlocks", () => {
|
||||
test("should parse simple test blocks", () => {
|
||||
const content = `
|
||||
test("should add numbers", () => {
|
||||
expect(1 + 1).toBe(2);
|
||||
});
|
||||
|
||||
test("should multiply numbers", () => {
|
||||
expect(2 * 3).toBe(6);
|
||||
});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe("should add numbers");
|
||||
expect(result[0].type).toBe("test");
|
||||
expect(result[1].name).toBe("should multiply numbers");
|
||||
expect(result[1].type).toBe("test");
|
||||
});
|
||||
|
||||
test("should parse describe blocks with nested tests", () => {
|
||||
const content = `
|
||||
describe("Math operations", () => {
|
||||
test("addition", () => {
|
||||
expect(1 + 1).toBe(2);
|
||||
});
|
||||
|
||||
test("subtraction", () => {
|
||||
expect(5 - 3).toBe(2);
|
||||
});
|
||||
});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Math operations");
|
||||
expect(result[0].type).toBe("describe");
|
||||
expect(result[0].children).toHaveLength(2);
|
||||
expect(result[0].children[0].name).toBe("addition");
|
||||
expect(result[0].children[1].name).toBe("subtraction");
|
||||
});
|
||||
|
||||
test("should handle test modifiers", () => {
|
||||
const content = `
|
||||
test.skip("skipped test", () => {});
|
||||
test.todo("todo test", () => {});
|
||||
test.only("only test", () => {});
|
||||
test.failing("failing test", () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(4);
|
||||
expect(result[0].name).toBe("skipped test");
|
||||
expect(result[1].name).toBe("todo test");
|
||||
expect(result[2].name).toBe("only test");
|
||||
expect(result[3].name).toBe("failing test");
|
||||
});
|
||||
|
||||
test("should handle conditional tests", () => {
|
||||
const content = `
|
||||
test.if(true)("conditional test", () => {});
|
||||
test.skipIf(false)("skip if test", () => {});
|
||||
test.todoIf(true)("todo if test", () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].name).toBe("conditional test");
|
||||
expect(result[1].name).toBe("skip if test");
|
||||
expect(result[2].name).toBe("todo if test");
|
||||
});
|
||||
|
||||
test("should ignore comments", () => {
|
||||
const content = `
|
||||
// This is a comment with test("fake test", () => {})
|
||||
/* Multi-line comment
|
||||
test("another fake test", () => {})
|
||||
*/
|
||||
test("real test", () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("real test");
|
||||
});
|
||||
|
||||
test("should handle nested describe blocks", () => {
|
||||
const content = `
|
||||
describe("Outer", () => {
|
||||
describe("Inner", () => {
|
||||
test("deeply nested", () => {});
|
||||
});
|
||||
test("shallow test", () => {});
|
||||
});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("Outer");
|
||||
expect(result[0].children).toHaveLength(2);
|
||||
expect(result[0].children[0].name).toBe("Inner");
|
||||
expect(result[0].children[0].children).toHaveLength(1);
|
||||
expect(result[0].children[0].children[0].name).toBe("deeply nested");
|
||||
expect(result[0].children[1].name).toBe("shallow test");
|
||||
});
|
||||
|
||||
test("should handle it() as alias for test()", () => {
|
||||
const content = `
|
||||
it("should work with it", () => {});
|
||||
it.skip("should skip with it", () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe("should work with it");
|
||||
expect(result[0].type).toBe("test");
|
||||
expect(result[1].name).toBe("should skip with it");
|
||||
});
|
||||
|
||||
test("should handle different quote types", () => {
|
||||
const content = `
|
||||
test('single quotes', () => {});
|
||||
test("double quotes", () => {});
|
||||
test(\`template literals\`, () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].name).toBe("single quotes");
|
||||
expect(result[1].name).toBe("double quotes");
|
||||
expect(result[2].name).toBe("template literals");
|
||||
});
|
||||
|
||||
test("should handle escaped quotes in test names", () => {
|
||||
const content = `
|
||||
test("test with \\"escaped\\" quotes", () => {});
|
||||
test('test with \\'escaped\\' quotes', () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe('test with "escaped" quotes');
|
||||
expect(result[1].name).toBe("test with 'escaped' quotes");
|
||||
});
|
||||
|
||||
test("should handle comments within test names", () => {
|
||||
const content = `
|
||||
test("test with // comment syntax", () => {});
|
||||
test("test with /* comment */ syntax", () => {});
|
||||
test("test with URL https://example.com", () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result.length).toBeGreaterThanOrEqual(1);
|
||||
|
||||
const hasCommentSyntax = result.some(r => r.name.includes("comment syntax"));
|
||||
const hasURL = result.some(r => r.name.includes("https://example.com"));
|
||||
|
||||
expect(hasCommentSyntax || hasURL).toBe(true);
|
||||
});
|
||||
|
||||
test("should ignore code that looks like tests in strings", () => {
|
||||
const content = `
|
||||
const str = "test('fake test', () => {})";
|
||||
const template = \`describe("fake describe", () => {})\`;
|
||||
|
||||
// Real test
|
||||
test("real test", () => {
|
||||
const example = 'test("nested fake", () => {})';
|
||||
});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result.length).toBeGreaterThanOrEqual(1);
|
||||
expect(result.some(r => r.name === "real test")).toBe(true);
|
||||
});
|
||||
|
||||
test("should handle tests with complex modifier chains", () => {
|
||||
const content = `
|
||||
test.skip.failing("skipped failing test", () => {});
|
||||
test.only.todo("only todo test", () => {});
|
||||
describe.skip.each([1, 2])("skip each %i", (n) => {});
|
||||
it.failing.each([{a: 1}])("failing each $a", ({a}) => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test("should handle weird spacing and formatting", () => {
|
||||
const content = `
|
||||
test ( "extra spaces" , ( ) => { } ) ;
|
||||
test
|
||||
(
|
||||
"multiline test"
|
||||
,
|
||||
(
|
||||
)
|
||||
=>
|
||||
{
|
||||
}
|
||||
)
|
||||
;
|
||||
test\t(\t"tabs"\t,\t()\t=>\t{}\t);
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].name).toBe("extra spaces");
|
||||
expect(result[1].name).toBe("multiline test");
|
||||
expect(result[2].name).toBe("tabs");
|
||||
});
|
||||
|
||||
test("should handle test.each with complex patterns", () => {
|
||||
const content = `
|
||||
test.each([
|
||||
[1, 2, 3],
|
||||
[4, 5, 9]
|
||||
])("when %i + %i, result should be %i", (a, b, expected) => {});
|
||||
|
||||
describe.each([
|
||||
{ db: "postgres" },
|
||||
{ db: "mysql" }
|
||||
])("Database $db", ({ db }) => {
|
||||
test("should connect", () => {});
|
||||
});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].name).toBe("when 1 + 2, result should be 3");
|
||||
expect(result[0].type).toBe("test");
|
||||
expect(result[1].name).toBe("when 4 + 5, result should be 9");
|
||||
expect(result[1].type).toBe("test");
|
||||
expect(result[2].name).toBe("Database $db");
|
||||
expect(result[2].type).toBe("describe");
|
||||
});
|
||||
|
||||
test("should handle Unicode and emoji in test names", () => {
|
||||
const content = `
|
||||
test("测试中文", () => {});
|
||||
test("テスト日本語", () => {});
|
||||
test("тест русский", () => {});
|
||||
test("🚀 rocket test", () => {});
|
||||
test("Test with 🎉 celebration", () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(5);
|
||||
expect(result[0].name).toBe("测试中文");
|
||||
expect(result[1].name).toBe("テスト日本語");
|
||||
expect(result[2].name).toBe("тест русский");
|
||||
expect(result[3].name).toBe("🚀 rocket test");
|
||||
expect(result[4].name).toBe("Test with 🎉 celebration");
|
||||
});
|
||||
|
||||
test("should handle test names with interpolation-like syntax", () => {
|
||||
const content = `
|
||||
test("test with \${variable}", () => {});
|
||||
test("test with \$dollar", () => {});
|
||||
test("test with %percent", () => {});
|
||||
test(\`template literal test\`, () => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(4);
|
||||
expect(result[0].name).toBe("test with ${variable}");
|
||||
expect(result[1].name).toBe("test with $dollar");
|
||||
expect(result[2].name).toBe("test with %percent");
|
||||
expect(result[3].name).toBe("template literal test");
|
||||
});
|
||||
|
||||
test("should handle async/await in test definitions", () => {
|
||||
const content = `
|
||||
test("sync test", () => {});
|
||||
test("async test", async () => {});
|
||||
test("test with await", async () => {
|
||||
await something();
|
||||
});
|
||||
it("async it", async function() {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result).toHaveLength(4);
|
||||
expect(result[0].name).toBe("sync test");
|
||||
expect(result[1].name).toBe("async test");
|
||||
expect(result[2].name).toBe("test with await");
|
||||
expect(result[3].name).toBe("async it");
|
||||
});
|
||||
|
||||
test("should handle generator functions and other ES6+ syntax", () => {
|
||||
const content = `
|
||||
test("generator test", function* () {
|
||||
yield 1;
|
||||
});
|
||||
|
||||
test.each\`
|
||||
a | b | expected
|
||||
\${1} | \${1} | \${2}
|
||||
\${1} | \${2} | \${3}
|
||||
\`('$a + $b = $expected', ({ a, b, expected }) => {});
|
||||
`;
|
||||
|
||||
const result = parseTestBlocks(content);
|
||||
|
||||
expect(result.length).toBeGreaterThanOrEqual(1);
|
||||
expect(result[0].name).toBe("generator test");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getBraceDepth", () => {
|
||||
test("should count braces correctly", () => {
|
||||
const content = "{ { } }";
|
||||
expect(getBraceDepth(content, 0, content.length)).toBe(0);
|
||||
expect(getBraceDepth(content, 0, 3)).toBe(2);
|
||||
expect(getBraceDepth(content, 0, 5)).toBe(1);
|
||||
});
|
||||
|
||||
test("should ignore braces in strings", () => {
|
||||
const content = '{ "string with { braces }" }';
|
||||
expect(getBraceDepth(content, 0, content.length)).toBe(0);
|
||||
});
|
||||
|
||||
test("should ignore braces in template literals", () => {
|
||||
const content = "{ `template with { braces }` }";
|
||||
expect(getBraceDepth(content, 0, content.length)).toBe(0);
|
||||
});
|
||||
|
||||
test("should handle escaped quotes", () => {
|
||||
const content = '{ "escaped \\" quote" }';
|
||||
expect(getBraceDepth(content, 0, content.length)).toBe(0);
|
||||
});
|
||||
|
||||
test("should handle mixed quotes", () => {
|
||||
const content = `{ "double" + 'single' + \`template\` }`;
|
||||
expect(getBraceDepth(content, 0, content.length)).toBe(0);
|
||||
});
|
||||
|
||||
test("should handle nested braces", () => {
|
||||
const content = "{ a: { b: { c: 1 } } }";
|
||||
expect(getBraceDepth(content, 0, 10)).toBe(2);
|
||||
expect(getBraceDepth(content, 0, 15)).toBe(3);
|
||||
});
|
||||
|
||||
test("should handle complex template literals", () => {
|
||||
const content = '{ `${foo({ bar: "baz" })} and ${nested.value}` }';
|
||||
expect(getBraceDepth(content, 0, content.length)).toBe(0);
|
||||
});
|
||||
|
||||
test("should handle edge cases", () => {
|
||||
expect(getBraceDepth("", 0, 0)).toBe(0);
|
||||
|
||||
expect(getBraceDepth("{{{}}}", 0, 6)).toBe(0);
|
||||
|
||||
expect(getBraceDepth("{{{", 0, 3)).toBe(3);
|
||||
expect(getBraceDepth("}}}", 0, 3)).toBe(-3);
|
||||
|
||||
const templateContent = "{ `${foo}` + `${bar}` }";
|
||||
expect(getBraceDepth(templateContent, 0, templateContent.length)).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,570 +0,0 @@
|
||||
/**
|
||||
* Mock VSCode types and classes for testing
|
||||
* These should be as close as possible to the real VSCode API
|
||||
*/
|
||||
|
||||
export interface MockUri {
|
||||
readonly scheme: string;
|
||||
readonly authority: string;
|
||||
readonly path: string;
|
||||
readonly query: string;
|
||||
readonly fragment: string;
|
||||
readonly fsPath: string;
|
||||
toString(): string;
|
||||
}
|
||||
|
||||
export class MockUri implements MockUri {
|
||||
constructor(
|
||||
public readonly scheme: string,
|
||||
public readonly authority: string,
|
||||
public readonly path: string,
|
||||
public readonly query: string,
|
||||
public readonly fragment: string,
|
||||
public readonly fsPath: string,
|
||||
) {}
|
||||
|
||||
static file(path: string): MockUri {
|
||||
return new MockUri("file", "", path, "", "", path);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return `${this.scheme}://${this.authority}${this.path}`;
|
||||
}
|
||||
}
|
||||
|
||||
export class MockPosition {
|
||||
constructor(
|
||||
public readonly line: number,
|
||||
public readonly character: number,
|
||||
) {}
|
||||
}
|
||||
|
||||
export class MockRange {
|
||||
constructor(
|
||||
public readonly start: MockPosition,
|
||||
public readonly end: MockPosition,
|
||||
) {}
|
||||
}
|
||||
|
||||
export class MockLocation {
|
||||
constructor(
|
||||
public readonly uri: MockUri,
|
||||
public readonly range: MockRange,
|
||||
) {}
|
||||
}
|
||||
|
||||
export class MockTestTag {
|
||||
constructor(public readonly id: string) {}
|
||||
}
|
||||
|
||||
export class MockTestMessage {
|
||||
public location?: MockLocation;
|
||||
public actualOutput?: string;
|
||||
public expectedOutput?: string;
|
||||
|
||||
constructor(public message: string | MockMarkdownString) {}
|
||||
|
||||
static diff(message: string, expected: string, actual: string): MockTestMessage {
|
||||
const msg = new MockTestMessage(message);
|
||||
msg.expectedOutput = expected;
|
||||
msg.actualOutput = actual;
|
||||
return msg;
|
||||
}
|
||||
}
|
||||
|
||||
export class MockMarkdownString {
|
||||
constructor(public value: string = "") {}
|
||||
|
||||
appendCodeblock(code: string, language?: string): MockMarkdownString {
|
||||
this.value += `\n\`\`\`${language || ""}\n${code}\n\`\`\``;
|
||||
return this;
|
||||
}
|
||||
|
||||
appendMarkdown(value: string): MockMarkdownString {
|
||||
this.value += value;
|
||||
return this;
|
||||
}
|
||||
|
||||
appendText(value: string): MockMarkdownString {
|
||||
this.value += value.replace(/[\\`*_{}[\]()#+\-.!]/g, "\\$&");
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockTestItem {
|
||||
readonly id: string;
|
||||
readonly uri?: MockUri;
|
||||
readonly children: MockTestItemCollection;
|
||||
readonly parent?: MockTestItem;
|
||||
label: string;
|
||||
description?: string;
|
||||
tags: readonly MockTestTag[];
|
||||
canResolveChildren: boolean;
|
||||
busy: boolean;
|
||||
range?: MockRange;
|
||||
error?: string | MockMarkdownString;
|
||||
}
|
||||
|
||||
export interface MockTestItemCollection {
|
||||
readonly size: number;
|
||||
add(item: MockTestItem): void;
|
||||
replace(items: readonly MockTestItem[]): void;
|
||||
forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void;
|
||||
get(itemId: string): MockTestItem | undefined;
|
||||
delete(itemId: string): void;
|
||||
[Symbol.iterator](): Iterator<[string, MockTestItem]>;
|
||||
}
|
||||
|
||||
export class MockTestItemCollection implements MockTestItemCollection {
|
||||
private items = new Map<string, MockTestItem>();
|
||||
|
||||
get size(): number {
|
||||
return this.items.size;
|
||||
}
|
||||
|
||||
add(item: MockTestItem): void {
|
||||
this.items.set(item.id, item);
|
||||
}
|
||||
|
||||
replace(items: readonly MockTestItem[]): void {
|
||||
this.items.clear();
|
||||
for (const item of items) {
|
||||
this.items.set(item.id, item);
|
||||
}
|
||||
}
|
||||
|
||||
forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void {
|
||||
this.items.forEach((item, id) => callback(item, id, this));
|
||||
}
|
||||
|
||||
get(itemId: string): MockTestItem | undefined {
|
||||
return this.items.get(itemId);
|
||||
}
|
||||
|
||||
delete(itemId: string): void {
|
||||
this.items.delete(itemId);
|
||||
}
|
||||
|
||||
[Symbol.iterator](): Iterator<[string, MockTestItem]> {
|
||||
return this.items[Symbol.iterator]();
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.items.clear();
|
||||
}
|
||||
|
||||
set(id: string, item: MockTestItem): void {
|
||||
this.items.set(id, item);
|
||||
}
|
||||
|
||||
values(): IterableIterator<MockTestItem> {
|
||||
return this.items.values();
|
||||
}
|
||||
|
||||
keys(): IterableIterator<string> {
|
||||
return this.items.keys();
|
||||
}
|
||||
|
||||
entries(): IterableIterator<[string, MockTestItem]> {
|
||||
return this.items.entries();
|
||||
}
|
||||
}
|
||||
|
||||
export class MockTestItem implements MockTestItem {
|
||||
public canResolveChildren: boolean = false;
|
||||
public busy: boolean = false;
|
||||
public description?: string;
|
||||
public range?: MockRange;
|
||||
public error?: string | MockMarkdownString;
|
||||
public readonly children: MockTestItemCollection;
|
||||
|
||||
constructor(
|
||||
public readonly id: string,
|
||||
public label: string,
|
||||
public readonly uri?: MockUri,
|
||||
public readonly parent?: MockTestItem,
|
||||
public tags: readonly MockTestTag[] = [],
|
||||
) {
|
||||
this.children = new MockTestItemCollection();
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockTestController {
|
||||
readonly items: MockTestItemCollection;
|
||||
createTestItem(id: string, label: string, uri?: MockUri): MockTestItem;
|
||||
createRunProfile(
|
||||
label: string,
|
||||
kind: MockTestRunProfileKind,
|
||||
runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
|
||||
isDefault?: boolean,
|
||||
): MockTestRunProfile;
|
||||
createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun;
|
||||
invalidateTestResults(items?: readonly MockTestItem[]): void;
|
||||
resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
|
||||
refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;
|
||||
}
|
||||
|
||||
export class MockTestController implements MockTestController {
|
||||
public readonly items: MockTestItemCollection;
|
||||
public resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
|
||||
public refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;
|
||||
|
||||
constructor(
|
||||
public readonly id: string,
|
||||
public readonly label: string,
|
||||
) {
|
||||
this.items = new MockTestItemCollection();
|
||||
}
|
||||
|
||||
createTestItem(id: string, label: string, uri?: MockUri): MockTestItem {
|
||||
return new MockTestItem(id, label, uri);
|
||||
}
|
||||
|
||||
createRunProfile(
|
||||
label: string,
|
||||
kind: MockTestRunProfileKind,
|
||||
runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
|
||||
isDefault?: boolean,
|
||||
): MockTestRunProfile {
|
||||
return new MockTestRunProfile(label, kind, runHandler, isDefault);
|
||||
}
|
||||
|
||||
createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun {
|
||||
return new MockTestRun(name, persist);
|
||||
}
|
||||
|
||||
invalidateTestResults(items?: readonly MockTestItem[]): void {
|
||||
// Mock implementation - in real VSCode this would invalidate test results
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this.items.clear();
|
||||
}
|
||||
}
|
||||
|
||||
export enum MockTestRunProfileKind {
|
||||
Run = 1,
|
||||
Debug = 2,
|
||||
Coverage = 3,
|
||||
}
|
||||
|
||||
export interface MockTestRunProfile {
|
||||
readonly label: string;
|
||||
readonly kind: MockTestRunProfileKind;
|
||||
readonly isDefault: boolean;
|
||||
readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>;
|
||||
dispose(): void;
|
||||
}
|
||||
|
||||
export class MockTestRunProfile implements MockTestRunProfile {
|
||||
constructor(
|
||||
public readonly label: string,
|
||||
public readonly kind: MockTestRunProfileKind,
|
||||
public readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
|
||||
public readonly isDefault: boolean = false,
|
||||
) {}
|
||||
|
||||
dispose(): void {
|
||||
// No-op for mock
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockTestRunRequest {
|
||||
readonly include?: readonly MockTestItem[];
|
||||
readonly exclude?: readonly MockTestItem[];
|
||||
readonly profile?: MockTestRunProfile;
|
||||
}
|
||||
|
||||
export class MockTestRunRequest implements MockTestRunRequest {
|
||||
constructor(
|
||||
public readonly include?: readonly MockTestItem[],
|
||||
public readonly exclude?: readonly MockTestItem[],
|
||||
public readonly profile?: MockTestRunProfile,
|
||||
) {}
|
||||
}
|
||||
|
||||
export interface MockTestRun {
|
||||
readonly name?: string;
|
||||
readonly token: MockCancellationToken;
|
||||
appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void;
|
||||
end(): void;
|
||||
enqueued(test: MockTestItem): void;
|
||||
errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
|
||||
failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
|
||||
passed(test: MockTestItem, duration?: number): void;
|
||||
skipped(test: MockTestItem): void;
|
||||
started(test: MockTestItem): void;
|
||||
}
|
||||
|
||||
export class MockTestRun implements MockTestRun {
|
||||
public readonly token: MockCancellationToken;
|
||||
private _ended: boolean = false;
|
||||
|
||||
constructor(
|
||||
public readonly name?: string,
|
||||
public readonly persist: boolean = true,
|
||||
) {
|
||||
this.token = new MockCancellationToken();
|
||||
}
|
||||
|
||||
appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void {
|
||||
if (this._ended) return;
|
||||
// For mock, just store output - in real VS Code this would appear in test output
|
||||
}
|
||||
|
||||
end(): void {
|
||||
this._ended = true;
|
||||
}
|
||||
|
||||
enqueued(test: MockTestItem): void {
|
||||
if (this._ended) return;
|
||||
// Mock implementation
|
||||
}
|
||||
|
||||
errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
|
||||
if (this._ended) return;
|
||||
// Mock implementation
|
||||
}
|
||||
|
||||
failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
|
||||
if (this._ended) return;
|
||||
// Mock implementation
|
||||
}
|
||||
|
||||
passed(test: MockTestItem, duration?: number): void {
|
||||
if (this._ended) return;
|
||||
// Mock implementation
|
||||
}
|
||||
|
||||
skipped(test: MockTestItem): void {
|
||||
if (this._ended) return;
|
||||
// Mock implementation
|
||||
}
|
||||
|
||||
started(test: MockTestItem): void {
|
||||
if (this._ended) return;
|
||||
// Mock implementation
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockCancellationToken {
|
||||
readonly isCancellationRequested: boolean;
|
||||
onCancellationRequested(listener: () => void): MockDisposable;
|
||||
}
|
||||
|
||||
export class MockCancellationToken implements MockCancellationToken {
|
||||
private _isCancellationRequested: boolean = false;
|
||||
private _listeners: (() => void)[] = [];
|
||||
|
||||
get isCancellationRequested(): boolean {
|
||||
return this._isCancellationRequested;
|
||||
}
|
||||
|
||||
onCancellationRequested(listener: () => void): MockDisposable {
|
||||
this._listeners.push(listener);
|
||||
return new MockDisposable(() => {
|
||||
const index = this._listeners.indexOf(listener);
|
||||
if (index >= 0) {
|
||||
this._listeners.splice(index, 1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
cancel(): void {
|
||||
this._isCancellationRequested = true;
|
||||
this._listeners.forEach(listener => listener());
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockDisposable {
|
||||
dispose(): void;
|
||||
}
|
||||
|
||||
export class MockDisposable implements MockDisposable {
|
||||
constructor(private readonly disposeFunc?: () => void) {}
|
||||
|
||||
dispose(): void {
|
||||
this.disposeFunc?.();
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockTextDocument {
|
||||
readonly uri: MockUri;
|
||||
readonly fileName: string;
|
||||
readonly isUntitled: boolean;
|
||||
readonly languageId: string;
|
||||
readonly version: number;
|
||||
readonly isDirty: boolean;
|
||||
readonly isClosed: boolean;
|
||||
readonly eol: MockEndOfLine;
|
||||
readonly lineCount: number;
|
||||
getText(range?: MockRange): string;
|
||||
getWordRangeAtPosition(position: MockPosition, regex?: RegExp): MockRange | undefined;
|
||||
lineAt(line: number | MockPosition): MockTextLine;
|
||||
offsetAt(position: MockPosition): number;
|
||||
positionAt(offset: number): MockPosition;
|
||||
save(): Promise<boolean>;
|
||||
validatePosition(position: MockPosition): MockPosition;
|
||||
validateRange(range: MockRange): MockRange;
|
||||
}
|
||||
|
||||
export enum MockEndOfLine {
|
||||
LF = 1,
|
||||
CRLF = 2,
|
||||
}
|
||||
|
||||
export interface MockTextLine {
|
||||
readonly lineNumber: number;
|
||||
readonly text: string;
|
||||
readonly range: MockRange;
|
||||
readonly rangeIncludingLineBreak: MockRange;
|
||||
readonly firstNonWhitespaceCharacterIndex: number;
|
||||
readonly isEmptyOrWhitespace: boolean;
|
||||
}
|
||||
|
||||
export interface MockWorkspaceFolder {
|
||||
readonly uri: MockUri;
|
||||
readonly name: string;
|
||||
readonly index: number;
|
||||
}
|
||||
|
||||
export class MockWorkspaceFolder implements MockWorkspaceFolder {
|
||||
constructor(
|
||||
public readonly uri: MockUri,
|
||||
public readonly name: string,
|
||||
public readonly index: number = 0,
|
||||
) {}
|
||||
}
|
||||
|
||||
export interface MockFileSystemWatcher extends MockDisposable {
|
||||
readonly ignoreCreateEvents: boolean;
|
||||
readonly ignoreChangeEvents: boolean;
|
||||
readonly ignoreDeleteEvents: boolean;
|
||||
onDidCreate(listener: (uri: MockUri) => void): MockDisposable;
|
||||
onDidChange(listener: (uri: MockUri) => void): MockDisposable;
|
||||
onDidDelete(listener: (uri: MockUri) => void): MockDisposable;
|
||||
}
|
||||
|
||||
export class MockFileSystemWatcher implements MockFileSystemWatcher {
|
||||
public readonly ignoreCreateEvents: boolean = false;
|
||||
public readonly ignoreChangeEvents: boolean = false;
|
||||
public readonly ignoreDeleteEvents: boolean = false;
|
||||
|
||||
private _createListeners: ((uri: MockUri) => void)[] = [];
|
||||
private _changeListeners: ((uri: MockUri) => void)[] = [];
|
||||
private _deleteListeners: ((uri: MockUri) => void)[] = [];
|
||||
|
||||
onDidCreate(listener: (uri: MockUri) => void): MockDisposable {
|
||||
this._createListeners.push(listener);
|
||||
return new MockDisposable(() => {
|
||||
const index = this._createListeners.indexOf(listener);
|
||||
if (index >= 0) this._createListeners.splice(index, 1);
|
||||
});
|
||||
}
|
||||
|
||||
onDidChange(listener: (uri: MockUri) => void): MockDisposable {
|
||||
this._changeListeners.push(listener);
|
||||
return new MockDisposable(() => {
|
||||
const index = this._changeListeners.indexOf(listener);
|
||||
if (index >= 0) this._changeListeners.splice(index, 1);
|
||||
});
|
||||
}
|
||||
|
||||
onDidDelete(listener: (uri: MockUri) => void): MockDisposable {
|
||||
this._deleteListeners.push(listener);
|
||||
return new MockDisposable(() => {
|
||||
const index = this._deleteListeners.indexOf(listener);
|
||||
if (index >= 0) this._deleteListeners.splice(index, 1);
|
||||
});
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this._createListeners.length = 0;
|
||||
this._changeListeners.length = 0;
|
||||
this._deleteListeners.length = 0;
|
||||
}
|
||||
|
||||
// Helper methods for testing
|
||||
triggerCreate(uri: MockUri): void {
|
||||
this._createListeners.forEach(listener => listener(uri));
|
||||
}
|
||||
|
||||
triggerChange(uri: MockUri): void {
|
||||
this._changeListeners.forEach(listener => listener(uri));
|
||||
}
|
||||
|
||||
triggerDelete(uri: MockUri): void {
|
||||
this._deleteListeners.forEach(listener => listener(uri));
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockRelativePattern {
|
||||
readonly base: string;
|
||||
readonly pattern: string;
|
||||
}
|
||||
|
||||
export class MockRelativePattern implements MockRelativePattern {
|
||||
constructor(
|
||||
public readonly base: string | MockWorkspaceFolder,
|
||||
public readonly pattern: string,
|
||||
) {}
|
||||
|
||||
get baseUri(): MockUri {
|
||||
if (typeof this.base === "string") {
|
||||
return MockUri.file(this.base);
|
||||
}
|
||||
return this.base.uri;
|
||||
}
|
||||
}
|
||||
|
||||
export interface MockConfiguration {
|
||||
get<T>(section: string, defaultValue?: T): T | undefined;
|
||||
has(section: string): boolean;
|
||||
inspect<T>(section: string): MockConfigurationInspect<T> | undefined;
|
||||
update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void>;
|
||||
}
|
||||
|
||||
export interface MockConfigurationInspect<T> {
|
||||
readonly key: string;
|
||||
readonly defaultValue?: T;
|
||||
readonly globalValue?: T;
|
||||
readonly workspaceValue?: T;
|
||||
readonly workspaceFolderValue?: T;
|
||||
}
|
||||
|
||||
export enum MockConfigurationTarget {
|
||||
Global = 1,
|
||||
Workspace = 2,
|
||||
WorkspaceFolder = 3,
|
||||
}
|
||||
|
||||
export class MockConfiguration implements MockConfiguration {
|
||||
private _values = new Map<string, any>();
|
||||
|
||||
get<T>(section: string, defaultValue?: T): T | undefined {
|
||||
return this._values.get(section) ?? defaultValue;
|
||||
}
|
||||
|
||||
has(section: string): boolean {
|
||||
return this._values.has(section);
|
||||
}
|
||||
|
||||
inspect<T>(section: string): MockConfigurationInspect<T> | undefined {
|
||||
return {
|
||||
key: section,
|
||||
defaultValue: undefined,
|
||||
globalValue: this._values.get(section),
|
||||
workspaceValue: undefined,
|
||||
workspaceFolderValue: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
async update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void> {
|
||||
this._values.set(section, value);
|
||||
}
|
||||
|
||||
// Helper for testing
|
||||
setValue(section: string, value: any): void {
|
||||
this._values.set(section, value);
|
||||
}
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
import { mock } from "bun:test";
|
||||
import {
|
||||
MockConfiguration,
|
||||
MockDisposable,
|
||||
MockFileSystemWatcher,
|
||||
MockLocation,
|
||||
MockMarkdownString,
|
||||
MockPosition,
|
||||
MockRange,
|
||||
MockRelativePattern,
|
||||
MockTestController,
|
||||
MockTestMessage,
|
||||
MockTestRunProfileKind,
|
||||
MockTestTag,
|
||||
MockUri,
|
||||
MockWorkspaceFolder,
|
||||
} from "./vscode-types.mock";
|
||||
|
||||
mock.module("vscode", () => ({
|
||||
window: {
|
||||
createOutputChannel: () => ({
|
||||
appendLine: () => {},
|
||||
}),
|
||||
visibleTextEditors: [],
|
||||
},
|
||||
workspace: {
|
||||
getConfiguration: (section?: string) => new MockConfiguration(),
|
||||
onDidOpenTextDocument: () => new MockDisposable(),
|
||||
textDocuments: [],
|
||||
createFileSystemWatcher: (pattern: string | MockRelativePattern) => new MockFileSystemWatcher(),
|
||||
findFiles: async (include: string, exclude?: string, maxResults?: number, token?: any) => {
|
||||
return []; // Mock implementation
|
||||
},
|
||||
},
|
||||
Uri: MockUri,
|
||||
TestTag: MockTestTag,
|
||||
Position: MockPosition,
|
||||
Range: MockRange,
|
||||
Location: MockLocation,
|
||||
TestMessage: MockTestMessage,
|
||||
MarkdownString: MockMarkdownString,
|
||||
TestRunProfileKind: MockTestRunProfileKind,
|
||||
RelativePattern: MockRelativePattern,
|
||||
debug: {
|
||||
addBreakpoints: () => {},
|
||||
startDebugging: async () => true,
|
||||
},
|
||||
}));
|
||||
|
||||
export function makeTestController(): MockTestController {
|
||||
return new MockTestController("test-controller", "Test Controller");
|
||||
}
|
||||
|
||||
export function makeWorkspaceFolder(path: string): MockWorkspaceFolder {
|
||||
return new MockWorkspaceFolder(MockUri.file(path), path.split("/").pop() || "workspace", 0);
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,29 +1,215 @@
|
||||
import ts from "typescript";
|
||||
import * as vscode from "vscode";
|
||||
import { BunTestController, debug } from "./bun-test-controller";
|
||||
|
||||
export async function registerTests(context: vscode.ExtensionContext) {
|
||||
const workspaceFolder = (vscode.workspace.workspaceFolders || [])[0];
|
||||
if (!workspaceFolder) {
|
||||
return;
|
||||
/**
|
||||
* Find all matching test via ts AST
|
||||
*/
|
||||
function findTests(document: vscode.TextDocument): Array<{ name: string; range: vscode.Range }> {
|
||||
const sourceFile = ts.createSourceFile(document.fileName, document.getText(), ts.ScriptTarget.Latest, true);
|
||||
const tests: Array<{ name: string; range: vscode.Range }> = [];
|
||||
|
||||
// Visit all nodes in the AST
|
||||
function visit(node: ts.Node) {
|
||||
if (ts.isCallExpression(node)) {
|
||||
const expressionText = node.expression.getText(sourceFile);
|
||||
|
||||
// Check if the expression is a test function
|
||||
const isTest = expressionText === "test" || expressionText === "describe" || expressionText === "it";
|
||||
|
||||
if (!isTest) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the test name from the first argument
|
||||
const testName = node.arguments[0] && ts.isStringLiteral(node.arguments[0]) ? node.arguments[0].text : null;
|
||||
if (!testName) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the range of the test function for the CodeLens
|
||||
const start = document.positionAt(node.getStart());
|
||||
const end = document.positionAt(node.getEnd());
|
||||
const range = new vscode.Range(start, end);
|
||||
tests.push({ name: testName, range });
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
}
|
||||
|
||||
const config = vscode.workspace.getConfiguration("bun.test");
|
||||
const enable = config.get<boolean>("enable", true);
|
||||
if (!enable) {
|
||||
return;
|
||||
}
|
||||
visit(sourceFile);
|
||||
return tests;
|
||||
}
|
||||
|
||||
try {
|
||||
const controller = vscode.tests.createTestController("bun", "Bun Tests");
|
||||
context.subscriptions.push(controller);
|
||||
/**
|
||||
* This class provides CodeLens for test functions in the editor - find all tests in current document and provide CodeLens for them.
|
||||
* It finds all test functions in the current document and provides CodeLens for them (Run Test, Watch Test buttons).
|
||||
*/
|
||||
class TestCodeLensProvider implements vscode.CodeLensProvider {
|
||||
public provideCodeLenses(document: vscode.TextDocument): vscode.CodeLens[] {
|
||||
const codeLenses: vscode.CodeLens[] = [];
|
||||
const tests = findTests(document);
|
||||
|
||||
const bunTestController = new BunTestController(controller, workspaceFolder);
|
||||
for (const test of tests) {
|
||||
const runTestCommand = {
|
||||
title: "Run Test",
|
||||
command: "extension.bun.runTest",
|
||||
arguments: [document.fileName, test.name],
|
||||
};
|
||||
|
||||
context.subscriptions.push(bunTestController);
|
||||
} catch (error) {
|
||||
debug.appendLine(`Error initializing Bun Test Controller: ${error}`);
|
||||
vscode.window.showErrorMessage(
|
||||
"Failed to initialize Bun Test Explorer. You may need to update VS Code to version 1.59 or later.",
|
||||
);
|
||||
const watchTestCommand = {
|
||||
title: "Watch Test",
|
||||
command: "extension.bun.watchTest",
|
||||
arguments: [document.fileName, test.name],
|
||||
};
|
||||
|
||||
codeLenses.push(new vscode.CodeLens(test.range, runTestCommand));
|
||||
codeLenses.push(new vscode.CodeLens(test.range, watchTestCommand));
|
||||
}
|
||||
|
||||
return codeLenses;
|
||||
}
|
||||
}
|
||||
|
||||
// default file pattern to search for tests
|
||||
const DEFAULT_FILE_PATTERN = "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}";
|
||||
|
||||
/**
|
||||
* This function registers a CodeLens provider for test files. It is used to display the "Run" and "Watch" buttons.
|
||||
*/
|
||||
export function registerTestCodeLens(context: vscode.ExtensionContext) {
|
||||
const codeLensProvider = new TestCodeLensProvider();
|
||||
|
||||
// Get the user-defined file pattern from the settings, or use the default
|
||||
// Setting is:
|
||||
// bun.test.filePattern
|
||||
const pattern = vscode.workspace.getConfiguration("bun.test").get("filePattern", DEFAULT_FILE_PATTERN);
|
||||
const options = { scheme: "file", pattern };
|
||||
|
||||
context.subscriptions.push(
|
||||
vscode.languages.registerCodeLensProvider({ ...options, language: "javascript" }, codeLensProvider),
|
||||
);
|
||||
|
||||
context.subscriptions.push(
|
||||
vscode.languages.registerCodeLensProvider({ ...options, language: "typescript" }, codeLensProvider),
|
||||
);
|
||||
|
||||
context.subscriptions.push(
|
||||
vscode.languages.registerCodeLensProvider({ ...options, language: "javascriptreact" }, codeLensProvider),
|
||||
);
|
||||
|
||||
context.subscriptions.push(
|
||||
vscode.languages.registerCodeLensProvider({ ...options, language: "typescriptreact" }, codeLensProvider),
|
||||
);
|
||||
}
|
||||
|
||||
// Tracking only one active terminal, so there will be only one terminal running at a time.
|
||||
// Example: when user clicks "Run Test" button, the previous terminal will be disposed.
|
||||
let activeTerminal: vscode.Terminal | null = null;
|
||||
|
||||
/**
|
||||
* This function registers the test runner commands.
|
||||
*/
|
||||
export function registerTestRunner(context: vscode.ExtensionContext) {
|
||||
// Register the "Run Test" command
|
||||
const runTestCommand = vscode.commands.registerCommand(
|
||||
"extension.bun.runTest",
|
||||
async (filePath?: string, testName?: string, isWatchMode: boolean = false) => {
|
||||
// Get custom flag
|
||||
const customFlag = vscode.workspace.getConfiguration("bun.test").get("customFlag", "").trim();
|
||||
const customScriptSetting = vscode.workspace.getConfiguration("bun.test").get("customScript", "bun test").trim();
|
||||
|
||||
const customScript = customScriptSetting.length ? customScriptSetting : "bun test";
|
||||
|
||||
// When this command is called from the command palette, the fileName and testName arguments are not passed (commands in package.json)
|
||||
// so then fileName is taken from the active text editor and it run for the whole file.
|
||||
if (!filePath) {
|
||||
const editor = vscode.window.activeTextEditor;
|
||||
|
||||
if (!editor) {
|
||||
await vscode.window.showErrorMessage("No active editor to run tests in");
|
||||
return;
|
||||
}
|
||||
|
||||
filePath = editor.document.fileName;
|
||||
}
|
||||
|
||||
// Detect if along file path there is package.json, like in mono-repo, if so, then switch to that directory
|
||||
const packageJsonPaths = await vscode.workspace.findFiles("**/package.json");
|
||||
|
||||
// Sort by length, so the longest path is first, so we can switch to the deepest directory
|
||||
const packagesRootPaths = packageJsonPaths
|
||||
.map(uri => uri.fsPath.replace("/package.json", ""))
|
||||
.sort((a, b) => b.length - a.length);
|
||||
|
||||
const packageJsonPath: string | undefined = packagesRootPaths.find(path => filePath.includes(path));
|
||||
|
||||
if (activeTerminal) {
|
||||
activeTerminal.dispose();
|
||||
activeTerminal = null;
|
||||
}
|
||||
|
||||
const cwd = packageJsonPath ?? vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? process.cwd();
|
||||
|
||||
const message = isWatchMode
|
||||
? `Watching \x1b[1m\x1b[32m${testName ?? filePath}\x1b[0m test`
|
||||
: `Running \x1b[1m\x1b[32m${testName ?? filePath}\x1b[0m test`;
|
||||
|
||||
const terminalOptions: vscode.TerminalOptions = {
|
||||
cwd,
|
||||
name: "Bun Test Runner",
|
||||
location: vscode.TerminalLocation.Panel,
|
||||
message,
|
||||
hideFromUser: true,
|
||||
};
|
||||
|
||||
activeTerminal = vscode.window.createTerminal(terminalOptions);
|
||||
activeTerminal.show();
|
||||
|
||||
let command = customScript;
|
||||
|
||||
if (filePath.length !== 0) {
|
||||
command += ` "${filePath}"`;
|
||||
}
|
||||
|
||||
if (testName && testName.length) {
|
||||
const escapedTestName = escapeRegex(testName);
|
||||
if (customScriptSetting.length) {
|
||||
// escape the quotes in the test name
|
||||
command += ` -t "${escapedTestName}"`;
|
||||
} else {
|
||||
command += ` -t "${escapedTestName}"`;
|
||||
}
|
||||
}
|
||||
|
||||
if (isWatchMode) {
|
||||
command += ` --watch`;
|
||||
}
|
||||
|
||||
if (customFlag.length) {
|
||||
command += ` ${customFlag}`;
|
||||
}
|
||||
|
||||
activeTerminal.sendText(command);
|
||||
},
|
||||
);
|
||||
|
||||
// Register the "Watch Test" command, which just calls the "Run Test" command with the watch flag
|
||||
const watchTestCommand = vscode.commands.registerCommand(
|
||||
"extension.bun.watchTest",
|
||||
async (fileName?: string, testName?: string) => {
|
||||
vscode.commands.executeCommand("extension.bun.runTest", fileName, testName, true);
|
||||
},
|
||||
);
|
||||
|
||||
context.subscriptions.push(runTestCommand);
|
||||
context.subscriptions.push(watchTestCommand);
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape any special characters in the input string, so that regex-matching on it
|
||||
* will work as expected.
|
||||
* i.e `new RegExp(escapeRegex("hi (:").test("hi (:")` will return true, instead of throwing
|
||||
* an invalid regex error.
|
||||
*/
|
||||
function escapeRegex(source: string) {
|
||||
return source.replaceAll(/[^a-zA-Z0-9_+\-'"\ ]/g, "\\$&");
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { ExtensionContext } from "vscode";
|
||||
import { ExtensionContext } from "vscode";
|
||||
|
||||
export const GLOBAL_STATE_VERSION = 1;
|
||||
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
+++ CMakeLists.txt
|
||||
@@ -1,5 +1,5 @@
|
||||
#
|
||||
-cmake_minimum_required(VERSION 3.17 FATAL_ERROR)
|
||||
+cmake_minimum_required(VERSION 3.17...3.30 FATAL_ERROR)
|
||||
|
||||
PROJECT(libarchive C)
|
||||
#
|
||||
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12 FATAL_ERROR)
|
||||
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12...3.5 FATAL_ERROR)
|
||||
if(POLICY CMP0065)
|
||||
cmake_policy(SET CMP0065 NEW) #3.4 don't use `-rdynamic` with executables
|
||||
endif()
|
||||
|
||||
@@ -1,29 +1,22 @@
|
||||
--- a/libarchive/archive_write_add_filter_gzip.c 2025-07-21 06:29:58.505101515 +0000
|
||||
+++ b/libarchive/archive_write_add_filter_gzip.c 2025-07-21 06:44:09.023676935 +0000
|
||||
@@ -59,12 +59,13 @@
|
||||
--- a/libarchive/archive_write_add_filter_gzip.c
|
||||
+++ b/libarchive/archive_write_add_filter_gzip.c
|
||||
@@ -58,6 +58,7 @@ archive_write_set_compression_gzip(struct archive *a)
|
||||
struct private_data {
|
||||
int compression_level;
|
||||
int timestamp;
|
||||
char *original_filename;
|
||||
+ unsigned char os;
|
||||
+ unsigned char os;
|
||||
#ifdef HAVE_ZLIB_H
|
||||
z_stream stream;
|
||||
int64_t total_in;
|
||||
unsigned char *compressed;
|
||||
size_t compressed_buffer_size;
|
||||
- unsigned long crc;
|
||||
+ uint32_t crc;
|
||||
#else
|
||||
struct archive_write_program_data *pdata;
|
||||
#endif
|
||||
@@ -108,6 +109,7 @@
|
||||
@@ -106,6 +107,7 @@ archive_write_add_filter_gzip(struct archive *_a)
|
||||
archive_set_error(&a->archive, ENOMEM, "Out of memory");
|
||||
return (ARCHIVE_FATAL);
|
||||
}
|
||||
f->data = data;
|
||||
+ data->os = 3; /* default Unix */
|
||||
f->data = data;
|
||||
f->open = &archive_compressor_gzip_open;
|
||||
f->options = &archive_compressor_gzip_options;
|
||||
f->close = &archive_compressor_gzip_close;
|
||||
@@ -177,6 +179,30 @@
|
||||
@@ -166,6 +168,30 @@ archive_compressor_gzip_options(struct archive_write_filter *f, const char *key,
|
||||
return (ARCHIVE_OK);
|
||||
}
|
||||
|
||||
@@ -54,7 +47,7 @@
|
||||
/* Note: The "warn" return is just to inform the options
|
||||
* supervisor that we didn't handle it. It will generate
|
||||
* a suitable error if no one used this option. */
|
||||
@@ -236,7 +262,7 @@
|
||||
@@ -226,7 +252,7 @@ archive_compressor_gzip_open(struct archive_write_filter *f)
|
||||
data->compressed[8] = 4;
|
||||
else
|
||||
data->compressed[8] = 0;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Version: 10
|
||||
# Version: 9
|
||||
# A script that installs the dependencies needed to build and test Bun.
|
||||
# This should work on Windows 10 or newer with PowerShell.
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# Version: 17
|
||||
# Version: 15
|
||||
|
||||
# A script that installs the dependencies needed to build and test Bun.
|
||||
# This should work on macOS and Linux with a POSIX shell.
|
||||
@@ -676,15 +676,9 @@ install_brew() {
|
||||
install_common_software() {
|
||||
case "$pm" in
|
||||
apt)
|
||||
# software-properties-common is not available in Debian Trixie
|
||||
if [ "$distro" = "debian" ] && [ "$release" = "13" ]; then
|
||||
install_packages \
|
||||
apt-transport-https
|
||||
else
|
||||
install_packages \
|
||||
apt-transport-https \
|
||||
software-properties-common
|
||||
fi
|
||||
install_packages \
|
||||
apt-transport-https \
|
||||
software-properties-common
|
||||
;;
|
||||
dnf)
|
||||
install_packages \
|
||||
@@ -1510,12 +1504,12 @@ configure_core_dumps() {
|
||||
# disable apport.service if it exists since it will override the core_pattern
|
||||
if which systemctl >/dev/null; then
|
||||
if systemctl list-unit-files apport.service >/dev/null; then
|
||||
execute_sudo "$systemctl" disable --now apport.service
|
||||
execute_sudo "$systemctl" disable --now apport.service || true
|
||||
fi
|
||||
fi
|
||||
|
||||
# load the new configuration (ignore permission errors)
|
||||
execute_sudo sysctl -p "$sysctl_file"
|
||||
execute_sudo sysctl -p "$sysctl_file" || true
|
||||
|
||||
# ensure that a regular user will be able to run sysctl
|
||||
if [ -d /sbin ]; then
|
||||
@@ -1538,17 +1532,6 @@ clean_system() {
|
||||
done
|
||||
}
|
||||
|
||||
ensure_no_tmpfs() {
|
||||
if ! [ "$os" = "linux" ]; then
|
||||
return
|
||||
fi
|
||||
if ! [ "$distro" = "ubuntu" ]; then
|
||||
return
|
||||
fi
|
||||
|
||||
execute_sudo systemctl mask tmp.mount
|
||||
}
|
||||
|
||||
main() {
|
||||
check_features "$@"
|
||||
check_operating_system
|
||||
@@ -1562,11 +1545,8 @@ main() {
|
||||
install_chromium
|
||||
install_fuse_python
|
||||
install_age
|
||||
if [ "${BUN_NO_CORE_DUMP:-0}" != "1" ]; then
|
||||
configure_core_dumps
|
||||
fi
|
||||
configure_core_dumps
|
||||
clean_system
|
||||
ensure_no_tmpfs
|
||||
}
|
||||
|
||||
main "$@"
|
||||
|
||||
@@ -44,21 +44,17 @@ if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${
|
||||
await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`;
|
||||
|
||||
console.log("moving cores out of nested directory");
|
||||
for await (const file of new Bun.Glob("bun-cores-*/*.core").scan(dir)) {
|
||||
for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) {
|
||||
fs.renameSync(join(dir, file), join(dir, basename(file)));
|
||||
}
|
||||
} else {
|
||||
console.log(`already downloaded in ${dir}`);
|
||||
}
|
||||
|
||||
const desiredCore = join(dir, (await new Bun.Glob(`*${pid}.core`).scan(dir).next()).value);
|
||||
|
||||
const args = [debuggerPath, "--core", desiredCore, join(dir, "bun-profile")];
|
||||
|
||||
console.log("launching debugger:");
|
||||
console.log(args.map(Bun.$.escape).join(" "));
|
||||
console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`);
|
||||
|
||||
const proc = Bun.spawn(args, {
|
||||
const proc = await Bun.spawn([debuggerPath, "--core", join(dir, `bun-${pid}.core`), join(dir, "bun-profile")], {
|
||||
stdin: "inherit",
|
||||
stdout: "inherit",
|
||||
stderr: "inherit",
|
||||
|
||||
@@ -20,7 +20,7 @@ async function globSources(output, patterns, excludes = []) {
|
||||
|
||||
const sources =
|
||||
paths
|
||||
.map(path => normalize(relative(root, path).replaceAll("\\", "/")))
|
||||
.map(path => normalize(relative(root, path)))
|
||||
.sort((a, b) => a.localeCompare(b))
|
||||
.join("\n")
|
||||
.trim() + "\n";
|
||||
@@ -32,13 +32,8 @@ const input = await file(join(root, "cmake", "Sources.json")).json();
|
||||
|
||||
const start = performance.now();
|
||||
for (const item of input) {
|
||||
await globSources(item.output, item.paths, [
|
||||
...(item.exclude || []),
|
||||
"src/bun.js/bindings/GeneratedBindings.zig",
|
||||
"src/bun.js/bindings/GeneratedJS2Native.zig",
|
||||
]);
|
||||
await globSources(item.output, item.paths, item.exclude);
|
||||
}
|
||||
|
||||
const end = performance.now();
|
||||
|
||||
const green = "\x1b[32m";
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user