Compare commits

..

31 Commits

Author SHA1 Message Date
RiskyMH
14780a6063 . 2025-07-23 01:11:37 +10:00
autofix-ci[bot]
41354aae95 [autofix.ci] apply automated fixes 2025-07-22 14:14:57 +00:00
RiskyMH
05c58ec7d6 subthread to delete it 2025-07-23 00:11:38 +10:00
autofix-ci[bot]
499e59b30b [autofix.ci] apply automated fixes 2025-07-22 13:58:01 +00:00
RiskyMH
cf10147d3e delete node_modules 2025-07-22 23:54:03 +10:00
Jarred Sumner
5c44553a02 Update vscode-release.yml 2025-07-21 23:25:57 -07:00
Michael H
f4116bfa7d followup for vscode test runner (#21024)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-21 23:21:05 -07:00
Jarred Sumner
5aeede1ac7 Quieter build commands 2025-07-21 21:56:41 -07:00
Meghan Denny
6d2a0e30f5 test: node: revert the previous tmpdirName commit 2025-07-21 21:34:00 -07:00
Jarred Sumner
382fe74fd0 Remove noisy copy_file logs on Linux 2025-07-21 21:24:35 -07:00
Jarred Sumner
aac646dbfe Suppress linker alignment warnings in debug build on macOS 2025-07-21 21:24:35 -07:00
Jarred Sumner
da90ad84d0 Fix windows build in git bash 2025-07-21 21:24:35 -07:00
robobun
6383c8f94c Fix beforeAll hooks running for unmatched describe blocks when using test filters (#21195)
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-21 20:21:29 -07:00
robobun
718e7cdc43 Upgrade libarchive to v3.8.1 (#21250)
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Zack Radisic <zack@theradisic.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-21 20:08:00 -07:00
robobun
cd54db1e4b Fix Windows cross-compilation missing executable permissions (#21268)
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-21 19:47:00 -07:00
Jarred Sumner
171169a237 Update CLAUDE.md 2025-07-21 19:14:24 -07:00
Jarred Sumner
5fbd99e0cb Fix parsing bug with non-sha256 integrity hashes when migrating lockfiles (#21220) 2025-07-21 17:10:06 -07:00
pfg
60faa8696f Auto cpp->zig bindings (#20881)
Co-authored-by: pfgithub <6010774+pfgithub@users.noreply.github.com>
Co-authored-by: Ben Grant <ben@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-21 16:26:07 -07:00
Meghan Denny
d2a4fb8124 test: node: much more robust tmpdirName for use with --parallel 2025-07-21 15:56:32 -07:00
Meghan Denny
a4d031a841 meta: add a --parallel flag to the runner for faster local testing (#21140)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-21 15:17:19 -07:00
Meghan Denny
56bc65932f ci: print memory usage in runner (#21163) 2025-07-21 15:12:30 -07:00
pfg
83760fc446 Sort imports in all files (#21119)
Co-authored-by: taylor.fish <contact@taylor.fish>
2025-07-21 13:26:47 -07:00
robobun
74d3610d41 Update lol-html to v2.6.0 (#21251)
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-21 02:08:35 -07:00
Jarred Sumner
1d085cb4d4 CI: normalize glob-sources paths to posix paths 2025-07-21 01:24:59 -07:00
Jarred Sumner
a868e859d7 Run formatter 2025-07-21 01:19:09 -07:00
Zack Radisic
39dd5002c3 Fix CSS error with printing :is(...) pseudo class (#21249)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-07-21 00:21:33 -07:00
robobun
7940861b87 Fix extra bracket in template literal syntax highlighting (#17327) (#21187)
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-20 23:38:24 -07:00
github-actions[bot]
f65f31b783 deps: update sqlite to 3.50.300 (#21222)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-07-20 23:05:49 -07:00
robobun
cc5d8adcb5 Enable Windows long path support (#21244)
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-20 23:04:17 -07:00
Jarred Sumner
bbc4f89c25 Deflake test-21049.test.ts 2025-07-20 23:02:10 -07:00
Zack Radisic
f4339df16b SSG stuff (#20998)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2025-07-20 22:37:50 -07:00
891 changed files with 17393 additions and 8229 deletions

View File

@@ -46,9 +46,8 @@ jobs:
version: 0.14.0
- name: Zig Format
run: |
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
zig fmt src
bun scripts/sortImports src
./scripts/sort-imports.ts src
zig fmt src
- name: Prettier Format
run: |

47
.github/workflows/vscode-release.yml vendored Normal file
View File

@@ -0,0 +1,47 @@
name: VSCode Extension Publish
on:
workflow_dispatch:
inputs:
version:
description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
required: true
type: string
jobs:
publish:
name: "Publish to VS Code Marketplace"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.18"
- name: Install dependencies (root)
run: bun install
- name: Install dependencies
run: bun install
working-directory: packages/bun-vscode
- name: Set Version
run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
working-directory: packages/bun-vscode
- name: Build (inspector protocol)
run: bun install && bun run build
working-directory: packages/bun-inspector-protocol
- name: Build (vscode extension)
run: bun run build
working-directory: packages/bun-vscode
- name: Publish
if: success()
run: bunx vsce publish
env:
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
working-directory: packages/bun-vscode/extension

View File

@@ -168,5 +168,5 @@
"WebKit/WebInspectorUI": true,
},
"git.detectSubmodules": false,
"bun.test.customScript": "bun-debug test"
"bun.test.customScript": "./build/debug/bun-debug test"
}

View File

@@ -4,9 +4,9 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
### Build Commands
- **Build debug version**: `bun bd` or `bun run build:debug`
- **Build debug version**: `bun bd`
- Creates a debug build at `./build/debug/bun-debug`
- Compilation takes ~2.5 minutes
- Compilation takes ~5 minutes. Don't timeout, be patient.
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`

View File

@@ -160,6 +160,7 @@ In particular, these are:
- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.

View File

@@ -752,6 +752,13 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
});
}
}
{
const cppImport = b.createModule(.{
.root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "cpp.zig"),
});
mod.addImport("cpp", cppImport);
cppImport.addImport("bun", mod);
}
inline for (.{
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },

View File

@@ -4,6 +4,8 @@
"": {
"name": "bun",
"devDependencies": {
"@lezer/common": "^1.2.3",
"@lezer/cpp": "^1.1.3",
"esbuild": "^0.21.4",
"mitata": "^0.1.11",
"peechy": "0.4.34",
@@ -87,6 +89,14 @@
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
"@types/bun": ["@types/bun@workspace:packages/@types/bun"],
"@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],

View File

@@ -8,6 +8,7 @@ src/codegen/bundle-functions.ts
src/codegen/bundle-modules.ts
src/codegen/class-definitions.ts
src/codegen/client-js.ts
src/codegen/cppbind.ts
src/codegen/create-hash-table.ts
src/codegen/generate-classes.ts
src/codegen/generate-compact-string-table.ts
@@ -17,3 +18,4 @@ src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/shared-types.ts

View File

@@ -304,12 +304,14 @@ src/bundler/linker_context/generateCodeForLazyExport.zig
src/bundler/linker_context/generateCompileResultForCssChunk.zig
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
src/bundler/linker_context/generateCompileResultForJSChunk.zig
src/bundler/linker_context/OutputFileListBuilder.zig
src/bundler/linker_context/postProcessCSSChunk.zig
src/bundler/linker_context/postProcessHTMLChunk.zig
src/bundler/linker_context/postProcessJSChunk.zig
src/bundler/linker_context/prepareCssAstsForChunk.zig
src/bundler/linker_context/renameSymbolsInChunk.zig
src/bundler/linker_context/scanImportsAndExports.zig
src/bundler/linker_context/StaticRouteVisitor.zig
src/bundler/linker_context/writeOutputFilesToDisk.zig
src/bundler/LinkerContext.zig
src/bundler/LinkerGraph.zig

View File

@@ -255,6 +255,10 @@ set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
${CODEGEN_PATH}/ZigGeneratedClasses.h
${CODEGEN_PATH}/ZigGeneratedClasses.cpp
@@ -308,6 +312,27 @@ set(BUN_JAVASCRIPT_OUTPUTS
${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig
)
set(BUN_CPP_OUTPUTS
${CODEGEN_PATH}/cpp.zig
)
register_command(
TARGET
bun-cppbind
COMMENT
"Generating C++ --> Zig bindings"
COMMAND
${BUN_EXECUTABLE}
${CWD}/src/codegen/cppbind.ts
${CWD}/src
${CODEGEN_PATH}
SOURCES
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_CXX_SOURCES}
OUTPUTS
${BUN_CPP_OUTPUTS}
)
register_command(
TARGET
bun-js-modules
@@ -537,6 +562,7 @@ set(BUN_ZIG_GENERATED_SOURCES
${BUN_ERROR_CODE_OUTPUTS}
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
${BUN_JAVASCRIPT_OUTPUTS}
${BUN_CPP_OUTPUTS}
)
# In debug builds, these are not embedded, but rather referenced at runtime.
@@ -606,6 +632,7 @@ register_command(
TARGETS
clone-zig
clone-zstd
bun-cppbind
SOURCES
${BUN_ZIG_SOURCES}
${BUN_ZIG_GENERATED_SOURCES}
@@ -618,10 +645,6 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
if(WIN32)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
@@ -685,7 +708,7 @@ if(WIN32)
${CODEGEN_PATH}/windows-app-info.rc
@ONLY
)
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc ${CWD}/src/bun.exe.manifest)
endif()
# --- Executable ---
@@ -958,6 +981,16 @@ if(APPLE)
-Wl,-map,${bun}.linker-map
)
if(DEBUG)
target_link_options(${bun} PUBLIC
# Suppress ALL linker warnings on macOS.
# The intent is to only suppress linker alignment warnings.
# As of July 21st, 2025 there doesn't seem to be a more specific suppression just for linker alignment warnings.
# If you find one, please update this to only be for linker alignment.
-Wl,-w
)
endif()
# don't strip in debug, this seems to be needed so that the Zig std library
# `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
# output executable

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
libarchive/libarchive
COMMIT
898dc8319355b7e985f68a9819f182aaed61b53a
7118f97c26bf0b2f426728b482f86508efc81d02
)
register_cmake_command(
@@ -20,11 +20,14 @@ register_cmake_command(
-DENABLE_WERROR=OFF
-DENABLE_BZip2=OFF
-DENABLE_CAT=OFF
-DENABLE_CPIO=OFF
-DENABLE_UNZIP=OFF
-DENABLE_EXPAT=OFF
-DENABLE_ICONV=OFF
-DENABLE_LIBB2=OFF
-DENABLE_LibGCC=OFF
-DENABLE_LIBXML2=OFF
-DENABLE_WIN32_XMLLITE=OFF
-DENABLE_LZ4=OFF
-DENABLE_LZMA=OFF
-DENABLE_LZO=OFF

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
67f1d4ffd6b74db7e053fb129dcce620193c180d
d64457d9ff0143deef025d5df7e8586092b9afb7
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)

View File

@@ -20,7 +20,7 @@ else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")

View File

@@ -7,6 +7,8 @@
"./packages/@types/bun"
],
"devDependencies": {
"@lezer/common": "^1.2.3",
"@lezer/cpp": "^1.1.3",
"esbuild": "^0.21.4",
"mitata": "^0.1.11",
"peechy": "0.4.34",
@@ -28,8 +30,8 @@
"watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
"build:debug": "bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
"build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert",

View File

@@ -27,11 +27,17 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
- Run scripts from package.json
- Visual lockfile viewer for old binary lockfiles (`bun.lockb`)
## Bun test runner integration
Run and debug tests directly from VSCode's Testing panel. The extension automatically discovers test files, shows inline test status, and provides rich error messages with diffs.
![Test runner example](https://raw.githubusercontent.com/oven-sh/bun/refs/heads/main/packages/bun-vscode/assets/bun-test.gif)
## In-editor error messages
When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.
![Error messages example](https://raw.githubusercontent.com/oven-sh/bun/refs/heads/main/packages/bun-vscode/error-messages.gif)
![Error messages example](https://raw.githubusercontent.com/oven-sh/bun/refs/heads/main/packages/bun-vscode/assets/error-messages.gif)
<div align="center">
<sup>In the example above VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 MiB

View File

Before

Width:  |  Height:  |  Size: 462 KiB

After

Width:  |  Height:  |  Size: 462 KiB

View File

@@ -102,8 +102,6 @@
"@types/ws": ["@types/ws@8.5.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ=="],
"@types/xml2js": ["@types/xml2js@0.4.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ=="],
"@vscode/debugadapter": ["@vscode/debugadapter@1.61.0", "", { "dependencies": { "@vscode/debugprotocol": "1.61.0" } }, "sha512-VDGLUFDVAdnftUebZe4uQCIFUbJ7rTc2Grps4D/CXl+qyzTZSQLv5VADEOZ6kBYG4SvlnMLql5vPQ0G6XvUCvQ=="],
"@vscode/debugadapter-testsupport": ["@vscode/debugadapter-testsupport@1.61.0", "", { "dependencies": { "@vscode/debugprotocol": "1.61.0" } }, "sha512-M/8aNX1aFvupd+SP0NLEVLKUK9y52BuCK5vKO2gzdpSoRUR2fR8oFbGkTie+/p2Yrcswnuf7hFx0xWkV9avRdg=="],

View File

@@ -10,15 +10,13 @@
"devDependencies": {
"@types/bun": "^1.1.10",
"@types/vscode": "^1.60.0",
"@types/xml2js": "^0.4.14",
"@vscode/debugadapter": "^1.56.0",
"@vscode/debugadapter-testsupport": "^1.56.0",
"@vscode/test-cli": "^0.0.10",
"@vscode/test-electron": "^2.4.1",
"@vscode/vsce": "^2.20.1",
"esbuild": "^0.19.2",
"typescript": "^5.0.0",
"xml2js": "^0.6.2"
"typescript": "^5.0.0"
},
"activationEvents": [
"onStartupFinished"
@@ -73,7 +71,7 @@
},
"bun.test.filePattern": {
"type": "string",
"default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
"default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts,cjs,mjs}",
"description": "Glob pattern to find test files"
},
"bun.test.customFlag": {
@@ -83,8 +81,14 @@
},
"bun.test.customScript": {
"type": "string",
"default": "",
"default": "bun test",
"description": "Custom script to use instead of `bun test`, for example script from `package.json`"
},
"bun.test.enable": {
"type": "boolean",
"description": "If the test explorer should be enabled and integrated with your editor",
"scope": "window",
"default": true
}
}
},

View File

@@ -0,0 +1,864 @@
import { describe, expect, test } from "bun:test";
import { MockTestController, MockWorkspaceFolder } from "./vscode-types.mock";
import "./vscode.mock";
import { makeTestController, makeWorkspaceFolder } from "./vscode.mock";
const { BunTestController } = await import("../bun-test-controller");
const mockTestController: MockTestController = makeTestController();
const mockWorkspaceFolder: MockWorkspaceFolder = makeWorkspaceFolder("/test/workspace");
const controller = new BunTestController(mockTestController, mockWorkspaceFolder, true);
const internal = controller._internal;
const { expandEachTests, parseTestBlocks, getBraceDepth } = internal;
describe("BunTestController (static file parser)", () => {
describe("expandEachTests", () => {
describe("$variable syntax", () => {
test("should not expand $variable patterns (Bun behavior)", () => {
const content = `test.each([
{ a: 1, b: 2, expected: 3 },
{ a: 5, b: 5, expected: 10 }
])('$a + $b = $expected', ({ a, b, expected }) => {})`;
const result = expandEachTests("test.each([", "$a + $b = $expected", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$a + $b = $expected");
});
test("should not expand string values with quotes", () => {
const content = `test.each([
{ name: "Alice", city: "NYC" },
{ name: "Bob", city: "LA" }
])('$name from $city', ({ name, city }) => {})`;
const result = expandEachTests("test.each([", "$name from $city", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$name from $city");
});
test("should not expand nested property access", () => {
const content = `test.each([
{ user: { name: "Alice", profile: { city: "NYC" } } },
{ user: { name: "Bob", profile: { city: "LA" } } }
])('$user.name from $user.profile.city', ({ user }) => {})`;
const result = expandEachTests("test.each([", "$user.name from $user.profile.city", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$user.name from $user.profile.city");
});
test("should not expand array indexing", () => {
const content = `test.each([
{ users: [{ name: "Alice" }, { name: "Bob" }] },
{ users: [{ name: "Carol" }, { name: "Dave" }] }
])('first user: $users.0.name', ({ users }) => {})`;
const result = expandEachTests("test.each([", "first user: $users.0.name", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("first user: $users.0.name");
});
test("should return template as-is for missing properties", () => {
const content = `test.each([
{ a: 1 },
{ a: 2 }
])('$a and $missing', ({ a }) => {})`;
const result = expandEachTests("test.each([", "$a and $missing", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$a and $missing");
});
test("should handle edge cases with special identifiers", () => {
const content = `test.each([
{ _valid: "ok", $dollar: "yes", _123mix: "mixed" }
])('$_valid | $$dollar | $_123mix', (obj) => {})`;
const result = expandEachTests("test.each([", "$_valid | $$dollar | $_123mix", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$_valid | $$dollar | $_123mix");
});
test("should handle invalid identifiers as literals", () => {
const content = `test.each([
{ valid: "test" }
])('$valid | $123invalid | $has-dash', (obj) => {})`;
const result = expandEachTests("test.each([", "$valid | $123invalid | $has-dash", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$valid | $123invalid | $has-dash");
});
});
describe("% formatters", () => {
test("should handle %i for integers", () => {
const content = `test.each([
[1, 2, 3],
[5, 5, 10]
])('%i + %i = %i', (a, b, expected) => {})`;
const result = expandEachTests("test.each([", "%i + %i = %i", content, 0, "test", 1);
expect(result).toHaveLength(2);
expect(result[0].name).toBe("1 + 2 = 3");
expect(result[1].name).toBe("5 + 5 = 10");
});
test("should handle %s for strings", () => {
const content = `test.each([
["hello", "world"],
["foo", "bar"]
])('%s %s', (a, b) => {})`;
const result = expandEachTests("test.each([", "%s %s", content, 0, "test", 1);
expect(result).toHaveLength(2);
expect(result[0].name).toBe("hello world");
expect(result[1].name).toBe("foo bar");
});
test("should handle %f and %d for numbers", () => {
const content = `test.each([
[1.5, 2.7],
[3.14, 2.71]
])('%f and %d', (a, b) => {})`;
const result = expandEachTests("test.each([", "%f and %d", content, 0, "test", 1);
expect(result).toHaveLength(2);
expect(result[0].name).toBe("1.5 and 2.7");
expect(result[1].name).toBe("3.14 and 2.71");
});
test("should handle %o and %j for objects", () => {
const content = `test.each([
[{ a: 1 }, { b: 2 }]
])('%o and %j', (obj1, obj2) => {})`;
const result = expandEachTests("test.each([", "%o and %j", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("%o and %j");
});
test("should handle %# for index", () => {
const content = `test.each([
[1, 2],
[3, 4],
[5, 6]
])('Test #%#: %i + %i', (a, b) => {})`;
const result = expandEachTests("test.each([", "Test #%#: %i + %i", content, 0, "test", 1);
expect(result).toHaveLength(3);
expect(result[0].name).toBe("Test #1: 1 + 2");
expect(result[1].name).toBe("Test #2: 3 + 4");
expect(result[2].name).toBe("Test #3: 5 + 6");
});
test("should handle %% for literal percent", () => {
const content = `test.each([
[50],
[100]
])('%i%% complete', (percent) => {})`;
const result = expandEachTests("test.each([", "%i%% complete", content, 0, "test", 1);
expect(result).toHaveLength(2);
expect(result[0].name).toBe("50% complete");
expect(result[1].name).toBe("100% complete");
});
});
describe("describe.each", () => {
test("should work with describe.each", () => {
const content = `describe.each([
{ module: "fs", method: "readFile" },
{ module: "path", method: "join" }
])('$module module', ({ module, method }) => {})`;
const result = expandEachTests("describe.each([", "$module module", content, 0, "describe", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$module module");
expect(result[0].type).toBe("describe");
});
});
describe("error handling", () => {
test("should handle non-.each tests", () => {
const result = expandEachTests("test", "regular test", "test('regular test', () => {})", 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("regular test");
});
test("should handle malformed JSON", () => {
const content = `test.each([
{ invalid json }
])('test', () => {})`;
const result = expandEachTests("test.each([", "test", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("test");
});
test("should handle non-array values", () => {
const content = `test.each({ not: "array" })('test', () => {})`;
const result = expandEachTests("test.each([", "test", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("test");
});
});
describe("mixed formatters", () => {
test("should handle both $ and % in objects", () => {
const content = `test.each([
{ name: "Test", index: 0 }
])('$name #%#', (obj) => {})`;
const result = expandEachTests("test.each([", "$name #%#", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$name #%#");
});
});
describe("edge cases", () => {
test("should handle complex nested objects", () => {
const content = `test.each([
{
user: {
profile: {
address: {
city: "NYC",
coords: { lat: 40.7128, lng: -74.0060 }
}
}
}
}
])('User from $user.profile.address.city at $user.profile.address.coords.lat', ({ user }) => {})`;
const result = expandEachTests(
"test.each([",
"User from $user.profile.address.city at $user.profile.address.coords.lat",
content,
0,
"test",
1,
);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("User from $user.profile.address.city at $user.profile.address.coords.lat");
});
test("should handle arrays with inline comments", () => {
const content = `test.each([
{ a: 1 }, // first test
{ a: 2 }, // second test
// { a: 3 }, // commented out test
{ a: 4 } /* final test */
])('test $a', ({ a }) => {})`;
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("test $a");
});
test("should handle arrays with multiline comments", () => {
const content = `test.each([
{ name: "test1" },
/* This is a
multiline comment
that spans several lines */
{ name: "test2" },
/**
* JSDoc style comment
* with multiple lines
*/
{ name: "test3" }
])('$name', ({ name }) => {})`;
const result = expandEachTests("test.each([", "$name", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("$name");
});
test("should handle malformed array syntax gracefully", () => {
const content = `test.each([
{ a: 1 },
{ a: 2,,, }, // extra commas
{ a: 3, }, // trailing comma
{ a: 4 },,, // extra trailing commas
])('test $a', ({ a }) => {})`;
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
expect(result.length).toBeGreaterThanOrEqual(1);
});
test("should handle strings with comment-like content", () => {
const content = `test.each([
{ comment: "// this is not a comment" },
{ comment: "/* neither is this */" },
{ url: "https://example.com/path" }
])('Test: $comment $url', (data) => {})`;
const result = expandEachTests("test.each([", "Test: $comment $url", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Test: $comment $url");
});
test("should handle special characters in strings", () => {
const content = `test.each([
{ char: "\\n" },
{ char: "\\t" },
{ char: "\\"" },
{ char: "\\'" },
{ char: "\\\\" },
{ char: "\`" }
])('Special char: $char', ({ char }) => {})`;
const result = expandEachTests("test.each([", "Special char: $char", content, 0, "test", 1);
expect(result.length).toBeGreaterThanOrEqual(1);
});
test("should handle empty arrays", () => {
const content = `test.each([])('should handle empty', () => {})`;
const result = expandEachTests("test.each([", "should handle empty", content, 0, "test", 1);
expect(result).toHaveLength(0);
});
test("should handle undefined and null values", () => {
const content = `test.each([
{ value: undefined },
{ value: null },
{ value: false },
{ value: 0 },
{ value: "" }
])('Value: $value', ({ value }) => {})`;
const result = expandEachTests("test.each([", "Value: $value", content, 0, "test", 1);
if (result.length === 1) {
expect(result[0].name).toBe("Value: $value");
} else {
expect(result).toHaveLength(5);
expect(result[0].name).toBe("Value: undefined");
expect(result[1].name).toBe("Value: null");
expect(result[2].name).toBe("Value: false");
expect(result[3].name).toBe("Value: 0");
expect(result[4].name).toBe("Value: ");
}
});
test("should handle circular references gracefully", () => {
const content = `test.each([
{ a: { b: "[Circular]" } },
{ a: { b: { c: "[Circular]" } } }
])('Circular: $a.b', ({ a }) => {})`;
const result = expandEachTests("test.each([", "Circular: $a.b", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Circular: $a.b");
});
test("should handle very long property paths", () => {
const content = `test.each([
{
a: {
b: {
c: {
d: {
e: {
f: {
g: "deeply nested"
}
}
}
}
}
}
}
])('Value: $a.b.c.d.e.f.g', (data) => {})`;
const result = expandEachTests("test.each([", "Value: $a.b.c.d.e.f.g", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Value: $a.b.c.d.e.f.g");
});
test("should handle syntax errors in array", () => {
const content = `test.each([
{ a: 1 }
{ a: 2 } // missing comma
{ a: 3 }
])('test $a', ({ a }) => {})`;
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("test $a");
});
test("should handle arrays with trailing commas", () => {
const content = `test.each([
{ a: 1 },
{ a: 2 },
])('test $a', ({ a }) => {})`;
const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("test $a");
});
test("should handle mixed data types in arrays", () => {
const content = `test.each([
["string", 123, true, null, undefined],
[{ obj: true }, [1, 2, 3], new Date("2024-01-01")]
])('test %s %i %s %s %s', (...args) => {})`;
const result = expandEachTests("test.each([", "test %s %i %s %s %s", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("test %s %i %s %s %s");
});
test("should handle regex-like strings", () => {
const content = `test.each([
{ pattern: "/^test.*$/" },
{ pattern: "\\\\d{3}-\\\\d{4}" },
{ pattern: "[a-zA-Z]+" }
])('Pattern: $pattern', ({ pattern }) => {})`;
const result = expandEachTests("test.each([", "Pattern: $pattern", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Pattern: $pattern");
});
test("should handle invalid property access gracefully", () => {
const content = `test.each([
{ a: { b: null } },
{ a: null },
{ },
{ a: { } }
])('Access: $a.b.c.d', (data) => {})`;
const result = expandEachTests("test.each([", "Access: $a.b.c.d", content, 0, "test", 1);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Access: $a.b.c.d");
});
test("should handle object methods and computed properties", () => {
const content = `test.each([
{ fn: function() {}, method() {}, arrow: () => {} },
{ ["computed"]: "value", [Symbol.for("sym")]: "symbol" }
])('Object with methods', (obj) => {})`;
const result = expandEachTests("test.each([", "Object with methods", content, 0, "test", 1);
expect(result.length).toBeGreaterThanOrEqual(1);
});
});
});
describe("parseTestBlocks", () => {
test("should parse simple test blocks", () => {
const content = `
test("should add numbers", () => {
expect(1 + 1).toBe(2);
});
test("should multiply numbers", () => {
expect(2 * 3).toBe(6);
});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(2);
expect(result[0].name).toBe("should add numbers");
expect(result[0].type).toBe("test");
expect(result[1].name).toBe("should multiply numbers");
expect(result[1].type).toBe("test");
});
test("should parse describe blocks with nested tests", () => {
const content = `
describe("Math operations", () => {
test("addition", () => {
expect(1 + 1).toBe(2);
});
test("subtraction", () => {
expect(5 - 3).toBe(2);
});
});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Math operations");
expect(result[0].type).toBe("describe");
expect(result[0].children).toHaveLength(2);
expect(result[0].children[0].name).toBe("addition");
expect(result[0].children[1].name).toBe("subtraction");
});
test("should handle test modifiers", () => {
const content = `
test.skip("skipped test", () => {});
test.todo("todo test", () => {});
test.only("only test", () => {});
test.failing("failing test", () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(4);
expect(result[0].name).toBe("skipped test");
expect(result[1].name).toBe("todo test");
expect(result[2].name).toBe("only test");
expect(result[3].name).toBe("failing test");
});
test("should handle conditional tests", () => {
const content = `
test.if(true)("conditional test", () => {});
test.skipIf(false)("skip if test", () => {});
test.todoIf(true)("todo if test", () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(3);
expect(result[0].name).toBe("conditional test");
expect(result[1].name).toBe("skip if test");
expect(result[2].name).toBe("todo if test");
});
test("should ignore comments", () => {
const content = `
// This is a comment with test("fake test", () => {})
/* Multi-line comment
test("another fake test", () => {})
*/
test("real test", () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("real test");
});
test("should handle nested describe blocks", () => {
const content = `
describe("Outer", () => {
describe("Inner", () => {
test("deeply nested", () => {});
});
test("shallow test", () => {});
});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(1);
expect(result[0].name).toBe("Outer");
expect(result[0].children).toHaveLength(2);
expect(result[0].children[0].name).toBe("Inner");
expect(result[0].children[0].children).toHaveLength(1);
expect(result[0].children[0].children[0].name).toBe("deeply nested");
expect(result[0].children[1].name).toBe("shallow test");
});
test("should handle it() as alias for test()", () => {
const content = `
it("should work with it", () => {});
it.skip("should skip with it", () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(2);
expect(result[0].name).toBe("should work with it");
expect(result[0].type).toBe("test");
expect(result[1].name).toBe("should skip with it");
});
test("should handle different quote types", () => {
const content = `
test('single quotes', () => {});
test("double quotes", () => {});
test(\`template literals\`, () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(3);
expect(result[0].name).toBe("single quotes");
expect(result[1].name).toBe("double quotes");
expect(result[2].name).toBe("template literals");
});
test("should handle escaped quotes in test names", () => {
const content = `
test("test with \\"escaped\\" quotes", () => {});
test('test with \\'escaped\\' quotes', () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(2);
expect(result[0].name).toBe('test with "escaped" quotes');
expect(result[1].name).toBe("test with 'escaped' quotes");
});
test("should handle comments within test names", () => {
const content = `
test("test with // comment syntax", () => {});
test("test with /* comment */ syntax", () => {});
test("test with URL https://example.com", () => {});
`;
const result = parseTestBlocks(content);
expect(result.length).toBeGreaterThanOrEqual(1);
const hasCommentSyntax = result.some(r => r.name.includes("comment syntax"));
const hasURL = result.some(r => r.name.includes("https://example.com"));
expect(hasCommentSyntax || hasURL).toBe(true);
});
test("should ignore code that looks like tests in strings", () => {
const content = `
const str = "test('fake test', () => {})";
const template = \`describe("fake describe", () => {})\`;
// Real test
test("real test", () => {
const example = 'test("nested fake", () => {})';
});
`;
const result = parseTestBlocks(content);
expect(result.length).toBeGreaterThanOrEqual(1);
expect(result.some(r => r.name === "real test")).toBe(true);
});
test("should handle tests with complex modifier chains", () => {
const content = `
test.skip.failing("skipped failing test", () => {});
test.only.todo("only todo test", () => {});
describe.skip.each([1, 2])("skip each %i", (n) => {});
it.failing.each([{a: 1}])("failing each $a", ({a}) => {});
`;
const result = parseTestBlocks(content);
expect(result.length).toBeGreaterThan(0);
});
test("should handle weird spacing and formatting", () => {
const content = `
test ( "extra spaces" , ( ) => { } ) ;
test
(
"multiline test"
,
(
)
=>
{
}
)
;
test\t(\t"tabs"\t,\t()\t=>\t{}\t);
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(3);
expect(result[0].name).toBe("extra spaces");
expect(result[1].name).toBe("multiline test");
expect(result[2].name).toBe("tabs");
});
test("should handle test.each with complex patterns", () => {
const content = `
test.each([
[1, 2, 3],
[4, 5, 9]
])("when %i + %i, result should be %i", (a, b, expected) => {});
describe.each([
{ db: "postgres" },
{ db: "mysql" }
])("Database $db", ({ db }) => {
test("should connect", () => {});
});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(3);
expect(result[0].name).toBe("when 1 + 2, result should be 3");
expect(result[0].type).toBe("test");
expect(result[1].name).toBe("when 4 + 5, result should be 9");
expect(result[1].type).toBe("test");
expect(result[2].name).toBe("Database $db");
expect(result[2].type).toBe("describe");
});
test("should handle Unicode and emoji in test names", () => {
const content = `
test("测试中文", () => {});
test("テスト日本語", () => {});
test("тест русский", () => {});
test("🚀 rocket test", () => {});
test("Test with 🎉 celebration", () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(5);
expect(result[0].name).toBe("测试中文");
expect(result[1].name).toBe("テスト日本語");
expect(result[2].name).toBe("тест русский");
expect(result[3].name).toBe("🚀 rocket test");
expect(result[4].name).toBe("Test with 🎉 celebration");
});
test("should handle test names with interpolation-like syntax", () => {
const content = `
test("test with \${variable}", () => {});
test("test with \$dollar", () => {});
test("test with %percent", () => {});
test(\`template literal test\`, () => {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(4);
expect(result[0].name).toBe("test with ${variable}");
expect(result[1].name).toBe("test with $dollar");
expect(result[2].name).toBe("test with %percent");
expect(result[3].name).toBe("template literal test");
});
test("should handle async/await in test definitions", () => {
const content = `
test("sync test", () => {});
test("async test", async () => {});
test("test with await", async () => {
await something();
});
it("async it", async function() {});
`;
const result = parseTestBlocks(content);
expect(result).toHaveLength(4);
expect(result[0].name).toBe("sync test");
expect(result[1].name).toBe("async test");
expect(result[2].name).toBe("test with await");
expect(result[3].name).toBe("async it");
});
test("should handle generator functions and other ES6+ syntax", () => {
const content = `
test("generator test", function* () {
yield 1;
});
test.each\`
a | b | expected
\${1} | \${1} | \${2}
\${1} | \${2} | \${3}
\`('$a + $b = $expected', ({ a, b, expected }) => {});
`;
const result = parseTestBlocks(content);
expect(result.length).toBeGreaterThanOrEqual(1);
expect(result[0].name).toBe("generator test");
});
});
describe("getBraceDepth", () => {
test("should count braces correctly", () => {
const content = "{ { } }";
expect(getBraceDepth(content, 0, content.length)).toBe(0);
expect(getBraceDepth(content, 0, 3)).toBe(2);
expect(getBraceDepth(content, 0, 5)).toBe(1);
});
test("should ignore braces in strings", () => {
const content = '{ "string with { braces }" }';
expect(getBraceDepth(content, 0, content.length)).toBe(0);
});
test("should ignore braces in template literals", () => {
const content = "{ `template with { braces }` }";
expect(getBraceDepth(content, 0, content.length)).toBe(0);
});
test("should handle escaped quotes", () => {
const content = '{ "escaped \\" quote" }';
expect(getBraceDepth(content, 0, content.length)).toBe(0);
});
test("should handle mixed quotes", () => {
const content = `{ "double" + 'single' + \`template\` }`;
expect(getBraceDepth(content, 0, content.length)).toBe(0);
});
test("should handle nested braces", () => {
const content = "{ a: { b: { c: 1 } } }";
expect(getBraceDepth(content, 0, 10)).toBe(2);
expect(getBraceDepth(content, 0, 15)).toBe(3);
});
test("should handle complex template literals", () => {
const content = '{ `${foo({ bar: "baz" })} and ${nested.value}` }';
expect(getBraceDepth(content, 0, content.length)).toBe(0);
});
test("should handle edge cases", () => {
expect(getBraceDepth("", 0, 0)).toBe(0);
expect(getBraceDepth("{{{}}}", 0, 6)).toBe(0);
expect(getBraceDepth("{{{", 0, 3)).toBe(3);
expect(getBraceDepth("}}}", 0, 3)).toBe(-3);
const templateContent = "{ `${foo}` + `${bar}` }";
expect(getBraceDepth(templateContent, 0, templateContent.length)).toBe(0);
});
});
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,570 @@
/**
* Mock VSCode types and classes for testing
* These should be as close as possible to the real VSCode API
*/
/**
 * Stand-in for vscode.Uri. Only the pieces the extension touches are modeled;
 * `fsPath` is stored verbatim rather than derived from the path component.
 */
export interface MockUri {
  readonly scheme: string;
  readonly authority: string;
  readonly path: string;
  readonly query: string;
  readonly fragment: string;
  readonly fsPath: string;
  toString(): string;
}
export class MockUri implements MockUri {
  constructor(
    public readonly scheme: string,
    public readonly authority: string,
    public readonly path: string,
    public readonly query: string,
    public readonly fragment: string,
    public readonly fsPath: string,
  ) {}

  /** Build a file-scheme URI whose path and fsPath are both `path`. */
  static file(path: string): MockUri {
    const scheme = "file";
    const empty = "";
    return new MockUri(scheme, empty, path, empty, empty, path);
  }

  /** Render as `scheme://authority + path`; query and fragment are omitted. */
  toString(): string {
    return this.scheme + "://" + this.authority + this.path;
  }
}
/** Zero-based line/character pair mirroring vscode.Position. */
export class MockPosition {
constructor(
public readonly line: number,
public readonly character: number,
) {}
}
/** Start/end position pair mirroring vscode.Range. Ordering is not enforced. */
export class MockRange {
constructor(
public readonly start: MockPosition,
public readonly end: MockPosition,
) {}
}
/** A URI plus a range inside it, mirroring vscode.Location. */
export class MockLocation {
constructor(
public readonly uri: MockUri,
public readonly range: MockRange,
) {}
}
/** Opaque id used to group/filter test items, mirroring vscode.TestTag. */
export class MockTestTag {
constructor(public readonly id: string) {}
}
/**
 * Stand-in for vscode.TestMessage: a message plus optional location and
 * expected/actual payloads for diff rendering.
 */
export class MockTestMessage {
  public location?: MockLocation;
  public actualOutput?: string;
  public expectedOutput?: string;

  constructor(public message: string | MockMarkdownString) {}

  /** Build a diff-style message carrying both sides of a failed comparison. */
  static diff(message: string, expected: string, actual: string): MockTestMessage {
    const result = new MockTestMessage(message);
    result.actualOutput = actual;
    result.expectedOutput = expected;
    return result;
  }
}
/**
 * Stand-in for vscode.MarkdownString: an appendable markdown buffer.
 * All append methods mutate `value` and return `this` for chaining.
 */
export class MockMarkdownString {
  constructor(public value: string = "") {}

  /** Append a fenced code block (with optional language tag) on a new line. */
  appendCodeblock(code: string, language?: string): MockMarkdownString {
    const lang = language || "";
    this.value = this.value + "\n```" + lang + "\n" + code + "\n```";
    return this;
  }

  /** Append raw markdown as-is. */
  appendMarkdown(value: string): MockMarkdownString {
    this.value = this.value + value;
    return this;
  }

  /** Append plain text, backslash-escaping markdown syntax characters. */
  appendText(value: string): MockMarkdownString {
    const escaped = value.replace(/[\\`*_{}[\]()#+\-.!]/g, "\\$&");
    this.value = this.value + escaped;
    return this;
  }
}
/** Shape of vscode.TestItem that the extension reads and writes. */
export interface MockTestItem {
readonly id: string;
readonly uri?: MockUri;
readonly children: MockTestItemCollection;
readonly parent?: MockTestItem;
label: string;
description?: string;
tags: readonly MockTestTag[];
canResolveChildren: boolean;
busy: boolean;
range?: MockRange;
error?: string | MockMarkdownString;
}
/** Shape of vscode.TestItemCollection: an ordered, id-keyed item container. */
export interface MockTestItemCollection {
readonly size: number;
add(item: MockTestItem): void;
replace(items: readonly MockTestItem[]): void;
forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void;
get(itemId: string): MockTestItem | undefined;
delete(itemId: string): void;
[Symbol.iterator](): Iterator<[string, MockTestItem]>;
}
/**
 * Map-backed implementation of vscode.TestItemCollection, plus a few extra
 * Map-like helpers (clear/set/values/keys/entries) used by the tests.
 */
export class MockTestItemCollection implements MockTestItemCollection {
  private backing = new Map<string, MockTestItem>();

  get size(): number {
    return this.backing.size;
  }

  /** Insert (or overwrite) an item keyed by its own id. */
  add(item: MockTestItem): void {
    this.backing.set(item.id, item);
  }

  /** Drop everything and re-populate from `items`, preserving their order. */
  replace(items: readonly MockTestItem[]): void {
    this.backing.clear();
    for (const entry of items) {
      this.backing.set(entry.id, entry);
    }
  }

  /** Invoke `callback(item, id, this)` for each entry in insertion order. */
  forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void {
    for (const [id, item] of this.backing) {
      callback(item, id, this);
    }
  }

  get(itemId: string): MockTestItem | undefined {
    return this.backing.get(itemId);
  }

  delete(itemId: string): void {
    this.backing.delete(itemId);
  }

  [Symbol.iterator](): Iterator<[string, MockTestItem]> {
    // A Map's default iterator yields [key, value] entries.
    return this.backing.entries();
  }

  // --- Map-style helpers beyond the vscode interface ---

  clear(): void {
    this.backing.clear();
  }

  /** Insert under an explicit key (which may differ from item.id). */
  set(id: string, item: MockTestItem): void {
    this.backing.set(id, item);
  }

  values(): IterableIterator<MockTestItem> {
    return this.backing.values();
  }

  keys(): IterableIterator<string> {
    return this.backing.keys();
  }

  entries(): IterableIterator<[string, MockTestItem]> {
    return this.backing.entries();
  }
}
/**
 * Concrete test item. Mutable display state starts at its defaults
 * (not resolvable, not busy); children is always a fresh empty collection.
 */
export class MockTestItem implements MockTestItem {
public canResolveChildren: boolean = false;
public busy: boolean = false;
public description?: string;
public range?: MockRange;
public error?: string | MockMarkdownString;
public readonly children: MockTestItemCollection;
constructor(
public readonly id: string,
public label: string,
public readonly uri?: MockUri,
public readonly parent?: MockTestItem,
public tags: readonly MockTestTag[] = [],
) {
this.children = new MockTestItemCollection();
}
}
/** Shape of vscode.TestController used by the extension under test. */
export interface MockTestController {
readonly items: MockTestItemCollection;
createTestItem(id: string, label: string, uri?: MockUri): MockTestItem;
createRunProfile(
label: string,
kind: MockTestRunProfileKind,
runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
isDefault?: boolean,
): MockTestRunProfile;
createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun;
invalidateTestResults(items?: readonly MockTestItem[]): void;
resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;
}
/**
 * Minimal in-memory implementation of vscode.TestController.
 * Factory methods mirror the real API shapes; there are no UI side effects.
 */
export class MockTestController implements MockTestController {
  public readonly items: MockTestItemCollection;
  public resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
  public refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;

  constructor(
    public readonly id: string,
    public readonly label: string,
  ) {
    this.items = new MockTestItemCollection();
  }

  /** Create a detached item; callers add it to a collection themselves. */
  createTestItem(id: string, label: string, uri?: MockUri): MockTestItem {
    const item = new MockTestItem(id, label, uri);
    return item;
  }

  /** Wrap the given handler in a run profile object. */
  createRunProfile(
    label: string,
    kind: MockTestRunProfileKind,
    runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    isDefault?: boolean,
  ): MockTestRunProfile {
    const profile = new MockTestRunProfile(label, kind, runHandler, isDefault);
    return profile;
  }

  /** Start a new mock run; `request` is accepted for API parity only. */
  createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun {
    return new MockTestRun(name, persist);
  }

  /** No-op: the real controller would drop cached results for `items`. */
  invalidateTestResults(items?: readonly MockTestItem[]): void {}

  /** Drop all registered items. */
  dispose(): void {
    this.items.clear();
  }
}
/** Mirrors vscode.TestRunProfileKind. */
export enum MockTestRunProfileKind {
Run = 1,
Debug = 2,
Coverage = 3,
}
/** Describes how a run profile is presented and which handler executes it. */
export interface MockTestRunProfile {
readonly label: string;
readonly kind: MockTestRunProfileKind;
readonly isDefault: boolean;
readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>;
dispose(): void;
}
/** Immutable value object pairing a profile's metadata with its handler. */
export class MockTestRunProfile implements MockTestRunProfile {
constructor(
public readonly label: string,
public readonly kind: MockTestRunProfileKind,
public readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
public readonly isDefault: boolean = false,
) {}
dispose(): void {
// No-op for mock
}
}
/** Selection of tests to run: include/exclude sets plus the chosen profile. */
export interface MockTestRunRequest {
readonly include?: readonly MockTestItem[];
readonly exclude?: readonly MockTestItem[];
readonly profile?: MockTestRunProfile;
}
export class MockTestRunRequest implements MockTestRunRequest {
constructor(
public readonly include?: readonly MockTestItem[],
public readonly exclude?: readonly MockTestItem[],
public readonly profile?: MockTestRunProfile,
) {}
}
/** Shape of vscode.TestRun: per-test state reporting plus output streaming. */
export interface MockTestRun {
readonly name?: string;
readonly token: MockCancellationToken;
appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void;
end(): void;
enqueued(test: MockTestItem): void;
errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
passed(test: MockTestItem, duration?: number): void;
skipped(test: MockTestItem): void;
started(test: MockTestItem): void;
}
/**
 * Minimal vscode.TestRun stand-in. All result-reporting methods become
 * no-ops once end() has been called; nothing is recorded in the mock.
 */
export class MockTestRun implements MockTestRun {
  public readonly token: MockCancellationToken;
  private _ended: boolean = false;

  constructor(
    public readonly name?: string,
    public readonly persist: boolean = true,
  ) {
    this.token = new MockCancellationToken();
  }

  /** True while the run is still accepting state updates. */
  private get isOpen(): boolean {
    return !this._ended;
  }

  /** Discarded here; the real API streams this to the test output pane. */
  appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void {
    if (!this.isOpen) return;
  }

  /** Finalize the run; all later reporting calls are ignored. */
  end(): void {
    this._ended = true;
  }

  enqueued(test: MockTestItem): void {
    if (!this.isOpen) return;
  }

  errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
    if (!this.isOpen) return;
  }

  failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
    if (!this.isOpen) return;
  }

  passed(test: MockTestItem, duration?: number): void {
    if (!this.isOpen) return;
  }

  skipped(test: MockTestItem): void {
    if (!this.isOpen) return;
  }

  started(test: MockTestItem): void {
    if (!this.isOpen) return;
  }
}
/** Mirrors vscode.CancellationToken: a flag plus a subscription point. */
export interface MockCancellationToken {
readonly isCancellationRequested: boolean;
onCancellationRequested(listener: () => void): MockDisposable;
}
/**
 * Implements MockCancellationToken with VSCode-like event semantics:
 * - cancel() flips the flag and notifies listeners exactly once; repeated
 *   calls are no-ops (previously every call re-fired all listeners).
 * - Subscribing after cancellation fires the listener immediately, as the
 *   real API still delivers the cancellation event to late subscribers
 *   (delivered synchronously here; VSCode schedules it asynchronously).
 */
export class MockCancellationToken implements MockCancellationToken {
  private _isCancellationRequested: boolean = false;
  private _listeners: (() => void)[] = [];

  get isCancellationRequested(): boolean {
    return this._isCancellationRequested;
  }

  /** Register `listener`; the returned disposable unregisters it. */
  onCancellationRequested(listener: () => void): MockDisposable {
    if (this._isCancellationRequested) {
      // Already cancelled: deliver the event right away, nothing to unregister.
      listener();
      return new MockDisposable();
    }
    this._listeners.push(listener);
    return new MockDisposable(() => {
      const index = this._listeners.indexOf(listener);
      if (index >= 0) {
        this._listeners.splice(index, 1);
      }
    });
  }

  /** Request cancellation; safe to call multiple times. */
  cancel(): void {
    if (this._isCancellationRequested) return; // notify at most once
    this._isCancellationRequested = true;
    // Detach the list first so listeners registered during notification
    // fall into the "already cancelled" path above instead.
    const pending = this._listeners.splice(0, this._listeners.length);
    pending.forEach(listener => listener());
  }
}
/** Minimal vscode.Disposable: dispose() runs an optional cleanup callback. */
export interface MockDisposable {
  dispose(): void;
}
export class MockDisposable implements MockDisposable {
  private readonly onDispose?: () => void;

  constructor(disposeFunc?: () => void) {
    this.onDispose = disposeFunc;
  }

  /** Run the cleanup callback if one was supplied; it runs on every call. */
  dispose(): void {
    if (this.onDispose) {
      this.onDispose();
    }
  }
}
/** Read-only view of an open document, mirroring vscode.TextDocument. */
export interface MockTextDocument {
readonly uri: MockUri;
readonly fileName: string;
readonly isUntitled: boolean;
readonly languageId: string;
readonly version: number;
readonly isDirty: boolean;
readonly isClosed: boolean;
readonly eol: MockEndOfLine;
readonly lineCount: number;
getText(range?: MockRange): string;
getWordRangeAtPosition(position: MockPosition, regex?: RegExp): MockRange | undefined;
lineAt(line: number | MockPosition): MockTextLine;
offsetAt(position: MockPosition): number;
positionAt(offset: number): MockPosition;
save(): Promise<boolean>;
validatePosition(position: MockPosition): MockPosition;
validateRange(range: MockRange): MockRange;
}
/** Line-ending style, mirroring vscode.EndOfLine. */
export enum MockEndOfLine {
LF = 1,
CRLF = 2,
}
/** A single line of a document, mirroring vscode.TextLine. */
export interface MockTextLine {
readonly lineNumber: number;
readonly text: string;
readonly range: MockRange;
readonly rangeIncludingLineBreak: MockRange;
readonly firstNonWhitespaceCharacterIndex: number;
readonly isEmptyOrWhitespace: boolean;
}
/** One root folder of the workspace, mirroring vscode.WorkspaceFolder. */
export interface MockWorkspaceFolder {
readonly uri: MockUri;
readonly name: string;
readonly index: number;
}
export class MockWorkspaceFolder implements MockWorkspaceFolder {
constructor(
public readonly uri: MockUri,
public readonly name: string,
public readonly index: number = 0,
) {}
}
/**
 * In-memory stand-in for vscode.FileSystemWatcher. Events are delivered
 * synchronously via the trigger* test helpers; the ignore* flags are
 * informational only and never consulted.
 */
export interface MockFileSystemWatcher extends MockDisposable {
  readonly ignoreCreateEvents: boolean;
  readonly ignoreChangeEvents: boolean;
  readonly ignoreDeleteEvents: boolean;
  onDidCreate(listener: (uri: MockUri) => void): MockDisposable;
  onDidChange(listener: (uri: MockUri) => void): MockDisposable;
  onDidDelete(listener: (uri: MockUri) => void): MockDisposable;
}
export class MockFileSystemWatcher implements MockFileSystemWatcher {
  public readonly ignoreCreateEvents: boolean = false;
  public readonly ignoreChangeEvents: boolean = false;
  public readonly ignoreDeleteEvents: boolean = false;
  private _createListeners: ((uri: MockUri) => void)[] = [];
  private _changeListeners: ((uri: MockUri) => void)[] = [];
  private _deleteListeners: ((uri: MockUri) => void)[] = [];

  /** Add `listener` to `list`; the returned disposable removes it again. */
  private subscribe(list: ((uri: MockUri) => void)[], listener: (uri: MockUri) => void): MockDisposable {
    list.push(listener);
    return new MockDisposable(() => {
      const at = list.indexOf(listener);
      if (at >= 0) list.splice(at, 1);
    });
  }

  onDidCreate(listener: (uri: MockUri) => void): MockDisposable {
    return this.subscribe(this._createListeners, listener);
  }

  onDidChange(listener: (uri: MockUri) => void): MockDisposable {
    return this.subscribe(this._changeListeners, listener);
  }

  onDidDelete(listener: (uri: MockUri) => void): MockDisposable {
    return this.subscribe(this._deleteListeners, listener);
  }

  /** Drop all listeners. */
  dispose(): void {
    this._createListeners.length = 0;
    this._changeListeners.length = 0;
    this._deleteListeners.length = 0;
  }

  // Test helpers: synchronously fan the uri out to the matching listeners.
  triggerCreate(uri: MockUri): void {
    for (const listener of this._createListeners) listener(uri);
  }

  triggerChange(uri: MockUri): void {
    for (const listener of this._changeListeners) listener(uri);
  }

  triggerDelete(uri: MockUri): void {
    for (const listener of this._deleteListeners) listener(uri);
  }
}
/**
 * Stand-in for vscode.RelativePattern: a glob `pattern` resolved against a
 * `base`, which (as in the real API) may be a path string or a workspace folder.
 *
 * Fix: the merged interface previously declared `base: string` while the class
 * declared `string | MockWorkspaceFolder` — a conflicting declaration merge.
 * The interface now matches the class (and vscode.RelativePattern's contract).
 */
export interface MockRelativePattern {
  readonly base: string | MockWorkspaceFolder;
  readonly pattern: string;
}
export class MockRelativePattern implements MockRelativePattern {
  constructor(
    public readonly base: string | MockWorkspaceFolder,
    public readonly pattern: string,
  ) {}

  /** The base as a URI: a file URI for string bases, the folder's own URI otherwise. */
  get baseUri(): MockUri {
    if (typeof this.base === "string") {
      return MockUri.file(this.base);
    }
    return this.base.uri;
  }
}
/** Subset of vscode.WorkspaceConfiguration used by the extension. */
export interface MockConfiguration {
get<T>(section: string, defaultValue?: T): T | undefined;
has(section: string): boolean;
inspect<T>(section: string): MockConfigurationInspect<T> | undefined;
update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void>;
}
/** Per-scope breakdown returned by inspect(). */
export interface MockConfigurationInspect<T> {
readonly key: string;
readonly defaultValue?: T;
readonly globalValue?: T;
readonly workspaceValue?: T;
readonly workspaceFolderValue?: T;
}
/** Mirrors vscode.ConfigurationTarget. */
export enum MockConfigurationTarget {
Global = 1,
Workspace = 2,
WorkspaceFolder = 3,
}
/**
 * Map-backed vscode.WorkspaceConfiguration stand-in. Values live in a flat
 * section -> value map; configuration targets are accepted but ignored.
 */
export class MockConfiguration implements MockConfiguration {
  private store = new Map<string, any>();

  /** Return the stored value, or `defaultValue` when absent or nullish. */
  get<T>(section: string, defaultValue?: T): T | undefined {
    const stored = this.store.get(section);
    return stored ?? defaultValue;
  }

  has(section: string): boolean {
    return this.store.has(section);
  }

  /** Report the stored value as the global value; other scopes stay unset. */
  inspect<T>(section: string): MockConfigurationInspect<T> | undefined {
    const globalValue = this.store.get(section);
    return {
      key: section,
      defaultValue: undefined,
      globalValue,
      workspaceValue: undefined,
      workspaceFolderValue: undefined,
    };
  }

  /** Store `value`; `configurationTarget` exists for API parity only. */
  async update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void> {
    this.store.set(section, value);
  }

  /** Test helper: seed a value without going through update(). */
  setValue(section: string, value: any): void {
    this.store.set(section, value);
  }
}

View File

@@ -0,0 +1,56 @@
import { mock } from "bun:test";
import {
MockConfiguration,
MockDisposable,
MockFileSystemWatcher,
MockLocation,
MockMarkdownString,
MockPosition,
MockRange,
MockRelativePattern,
MockTestController,
MockTestMessage,
MockTestRunProfileKind,
MockTestTag,
MockUri,
MockWorkspaceFolder,
} from "./vscode-types.mock";
mock.module("vscode", () => ({
window: {
createOutputChannel: () => ({
appendLine: () => {},
}),
visibleTextEditors: [],
},
workspace: {
getConfiguration: (section?: string) => new MockConfiguration(),
onDidOpenTextDocument: () => new MockDisposable(),
textDocuments: [],
createFileSystemWatcher: (pattern: string | MockRelativePattern) => new MockFileSystemWatcher(),
findFiles: async (include: string, exclude?: string, maxResults?: number, token?: any) => {
return []; // Mock implementation
},
},
Uri: MockUri,
TestTag: MockTestTag,
Position: MockPosition,
Range: MockRange,
Location: MockLocation,
TestMessage: MockTestMessage,
MarkdownString: MockMarkdownString,
TestRunProfileKind: MockTestRunProfileKind,
RelativePattern: MockRelativePattern,
debug: {
addBreakpoints: () => {},
startDebugging: async () => true,
},
}));
/** Create a MockTestController with a fixed id/label for use in unit tests. */
export function makeTestController(): MockTestController {
return new MockTestController("test-controller", "Test Controller");
}
/**
 * Build a MockWorkspaceFolder rooted at `path` (index 0). The folder name is
 * the last "/"-separated segment, falling back to "workspace" when the path
 * is empty or ends in a separator.
 */
export function makeWorkspaceFolder(path: string): MockWorkspaceFolder {
  const segments = path.split("/");
  const name = segments[segments.length - 1] || "workspace";
  return new MockWorkspaceFolder(MockUri.file(path), name, 0);
}

View File

@@ -17,7 +17,7 @@ export const debug = vscode.window.createOutputChannel("Bun - Test Runner");
export type TestNode = {
name: string;
type: "describe" | "test" | "it";
type: "describe" | "test";
line: number;
children: TestNode[];
parent?: TestNode;
@@ -51,11 +51,15 @@ export class BunTestController implements vscode.Disposable {
private currentRunType: "file" | "individual" = "file";
private requestedTestIds: Set<string> = new Set();
private discoveredTestIds: Set<string> = new Set();
private executedTestCount: number = 0;
private totalTestsStarted: number = 0;
constructor(
private readonly testController: vscode.TestController,
private readonly workspaceFolder: vscode.WorkspaceFolder,
readonly isTest: boolean = false,
) {
if (isTest) return;
this.setupTestController();
this.setupWatchers();
this.setupOpenDocumentListener();
@@ -67,10 +71,7 @@ export class BunTestController implements vscode.Disposable {
try {
this.signal = await this.createSignal();
await this.signal.ready;
debug.appendLine(`Signal initialized at: ${this.signal.url}`);
this.signal.on("Signal.Socket.connect", (socket: net.Socket) => {
debug.appendLine("Bun connected to signal socket");
this.handleSocketConnection(socket, this.currentRun!);
});
@@ -89,8 +90,9 @@ export class BunTestController implements vscode.Disposable {
};
this.testController.refreshHandler = async token => {
const files = await this.discoverInitialTests(token);
const files = await this.discoverInitialTests(token, false);
if (!files?.length) return;
if (token.isCancellationRequested) return;
const filePaths = new Set(files.map(f => f.fsPath));
for (const [, testItem] of this.testController.items) {
@@ -134,15 +136,21 @@ export class BunTestController implements vscode.Disposable {
}
private isTestFile(document: vscode.TextDocument): boolean {
return document?.uri?.scheme === "file" && /\.(test|spec)\.(js|jsx|ts|tsx|cjs|mts)$/.test(document.uri.fsPath);
return (
document?.uri?.scheme === "file" && /\.(test|spec)\.(js|jsx|ts|tsx|cjs|mjs|mts|cts)$/.test(document.uri.fsPath)
);
}
private async discoverInitialTests(cancellationToken?: vscode.CancellationToken): Promise<vscode.Uri[] | undefined> {
private async discoverInitialTests(
cancellationToken?: vscode.CancellationToken,
reset: boolean = true,
): Promise<vscode.Uri[] | undefined> {
try {
const tests = await this.findTestFiles(cancellationToken);
this.createFileTestItems(tests);
this.createFileTestItems(tests, reset);
return tests;
} catch {
} catch (error) {
debug.appendLine(`Error in discoverInitialTests: ${error}`);
return undefined;
}
}
@@ -179,6 +187,8 @@ export class BunTestController implements vscode.Disposable {
const ignoreGlobs = new Set(["**/node_modules/**"]);
for (const ignore of ignores) {
if (cancellationToken?.isCancellationRequested) return [];
try {
const content = await fs.readFile(ignore.fsPath, { encoding: "utf8" });
const lines = content
@@ -195,13 +205,15 @@ export class BunTestController implements vscode.Disposable {
ignoreGlobs.add(path.join(cwd.trim(), line.trim()));
}
}
} catch {}
} catch {
debug.appendLine(`Error in buildIgnoreGlobs: ${ignore.fsPath}`);
}
}
return [...ignoreGlobs.values()];
}
private createFileTestItems(files: vscode.Uri[]): void {
private createFileTestItems(files: vscode.Uri[], reset: boolean = true): void {
if (files.length === 0) {
return;
}
@@ -214,7 +226,9 @@ export class BunTestController implements vscode.Disposable {
path.relative(this.workspaceFolder.uri.fsPath, file.fsPath) || file.fsPath,
file,
);
fileTestItem.children.replace([]);
if (reset) {
fileTestItem.children.replace([]);
}
fileTestItem.canResolveChildren = true;
this.testController.items.add(fileTestItem);
}
@@ -274,7 +288,13 @@ export class BunTestController implements vscode.Disposable {
return { bunCommand, testArgs };
}
private async discoverTests(testItem?: vscode.TestItem | false, filePath?: string): Promise<void> {
private async discoverTests(
testItem?: vscode.TestItem | false,
filePath?: string,
cancellationToken?: vscode.CancellationToken,
): Promise<void> {
if (cancellationToken?.isCancellationRequested) return;
let targetPath = filePath;
if (!targetPath && testItem) {
targetPath = testItem?.uri?.fsPath || this.workspaceFolder.uri.fsPath;
@@ -297,17 +317,24 @@ export class BunTestController implements vscode.Disposable {
);
this.testController.items.add(fileTestItem);
}
fileTestItem.children.replace([]);
if (!this.currentRun) {
fileTestItem.children.replace([]);
}
fileTestItem.canResolveChildren = false;
this.addTestNodes(testNodes, fileTestItem, targetPath);
} catch {}
} catch {
debug.appendLine(`Error in discoverTests: ${targetPath}`);
}
}
private parseTestBlocks(fileContent: string): TestNode[] {
const cleanContent = fileContent
.replace(/\/\*[\s\S]*?\*\//g, match => match.replace(/[^\n\r]/g, " "))
.replace(/\/\/.*$/gm, match => " ".repeat(match.length));
.replace(/('(?:[^'\\]|\\.)*'|"(?:[^"\\]|\\.)*"|`(?:[^`\\]|\\.)*`)|\/\/.*$/gm, (match, str) => {
if (str) return str;
return " ".repeat(match.length);
});
const testRegex =
/\b(describe|test|it)(?:\.(?:skip|todo|failing|only))?(?:\.(?:if|todoIf|skipIf)\s*\([^)]*\))?(?:\.each\s*\([^)]*\))?\s*\(\s*(['"`])((?:\\\2|.)*?)\2\s*(?:,|\))/g;
@@ -319,6 +346,7 @@ export class BunTestController implements vscode.Disposable {
match = testRegex.exec(cleanContent);
while (match !== null) {
const [full, type, , name] = match;
const _type = type === "it" ? "test" : type;
const line = cleanContent.slice(0, match.index).split("\n").length - 1;
while (
@@ -329,7 +357,14 @@ export class BunTestController implements vscode.Disposable {
stack.pop();
}
const expandedNodes = this.expandEachTests(full, name, cleanContent, match.index, type as TestNode["type"], line);
const expandedNodes = this.expandEachTests(
full,
name,
cleanContent,
match.index,
_type as TestNode["type"],
line,
);
for (const node of expandedNodes) {
if (stack.length === 0) {
@@ -433,16 +468,16 @@ export class BunTestController implements vscode.Disposable {
throw new Error("Not an array");
}
return eachValues.map(val => {
let testName = name;
return eachValues.map((val, testIndex) => {
let testName = name.replace(/%%/g, "%").replace(/%#/g, (testIndex + 1).toString());
if (Array.isArray(val)) {
let idx = 0;
testName = testName.replace(/%[isfd]/g, () => {
testName = testName.replace(/%[isfdojp#%]/g, () => {
const v = val[idx++];
return typeof v === "object" ? JSON.stringify(v) : String(v);
});
} else {
testName = testName.replace(/%[isfd]/g, () => {
testName = testName.replace(/%[isfdojp#%]/g, () => {
return typeof val === "object" ? JSON.stringify(val) : String(val);
});
}
@@ -475,19 +510,22 @@ export class BunTestController implements vscode.Disposable {
: this.escapeTestName(node.name);
const testId = `${filePath}#${nodePath}`;
const testItem = this.testController.createTestItem(testId, this.stripAnsi(node.name), vscode.Uri.file(filePath));
let testItem = parent.children.get(testId);
if (!testItem) {
testItem = this.testController.createTestItem(testId, this.stripAnsi(node.name), vscode.Uri.file(filePath));
testItem.tags = [new vscode.TestTag(node.type === "describe" ? "describe" : "test")];
if (node.type) testItem.tags = [new vscode.TestTag(node.type)];
if (typeof node.line === "number") {
testItem.range = new vscode.Range(
new vscode.Position(node.line, 0),
new vscode.Position(node.line, node.name.length),
);
if (typeof node.line === "number") {
testItem.range = new vscode.Range(
new vscode.Position(node.line, 0),
new vscode.Position(node.line, node.name.length),
);
}
parent.children.add(testItem);
}
parent.children.add(testItem);
if (node.children.length > 0) {
this.addTestNodes(node.children, testItem, filePath, nodePath);
}
@@ -500,7 +538,7 @@ export class BunTestController implements vscode.Disposable {
}
private escapeTestName(source: string): string {
return source.replace(/[^a-zA-Z0-9_\ ]/g, "\\$&");
return source.replace(/[^\w \-\u0080-\uFFFF]/g, "\\$&");
}
private async createSignal(): Promise<UnixSignal | TCPSocketSignal> {
@@ -517,6 +555,23 @@ export class BunTestController implements vscode.Disposable {
token: vscode.CancellationToken,
isDebug: boolean,
): Promise<void> {
if (this.currentRun) {
this.closeAllActiveProcesses();
this.disconnectInspector();
if (this.currentRun) {
this.currentRun.appendOutput("\n\x1b[33mCancelled: Starting new test run\x1b[0m\n");
this.currentRun.end();
this.currentRun = null;
}
}
this.totalTestsStarted++;
if (this.totalTestsStarted > 15) {
this.closeAllActiveProcesses();
this.disconnectInspector();
this.signal?.close();
this.signal = null;
}
const run = this.testController.createTestRun(request);
token.onCancellationRequested(() => {
@@ -525,6 +580,14 @@ export class BunTestController implements vscode.Disposable {
this.disconnectInspector();
});
if ("onDidDispose" in run) {
(run.onDidDispose as vscode.Event<void>)(() => {
run?.end?.();
this.closeAllActiveProcesses();
this.disconnectInspector();
});
}
const queue: vscode.TestItem[] = [];
if (request.include) {
@@ -547,7 +610,9 @@ export class BunTestController implements vscode.Disposable {
await this.runTestsWithInspector(queue, run, token);
} catch (error) {
for (const test of queue) {
run.errored(test, new vscode.TestMessage(`Error: ${error}`));
const msg = new vscode.TestMessage(`Error: ${error}`);
msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
run.errored(test, msg);
}
} finally {
run.end();
@@ -557,8 +622,11 @@ export class BunTestController implements vscode.Disposable {
private async runTestsWithInspector(
tests: vscode.TestItem[],
run: vscode.TestRun,
_token: vscode.CancellationToken,
token: vscode.CancellationToken,
): Promise<void> {
const time = performance.now();
if (token.isCancellationRequested) return;
this.disconnectInspector();
const allFiles = new Set<string>();
@@ -569,13 +637,20 @@ export class BunTestController implements vscode.Disposable {
}
if (allFiles.size === 0) {
run.appendOutput("No test files found to run.\n");
return;
const errorMsg = "No test files found to run.";
run.appendOutput(`\x1b[31mError: ${errorMsg}\x1b[0m\n`);
for (const test of tests) {
const msg = new vscode.TestMessage(errorMsg);
msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
run.errored(test, msg);
}
throw new Error(errorMsg);
}
for (const test of tests) {
if (token.isCancellationRequested) return;
if (test.uri && test.canResolveChildren) {
await this.discoverTests(test);
await this.discoverTests(test, undefined, token);
}
}
@@ -584,6 +659,7 @@ export class BunTestController implements vscode.Disposable {
this.requestedTestIds.clear();
this.discoveredTestIds.clear();
this.executedTestCount = 0;
for (const test of tests) {
this.requestedTestIds.add(test.id);
}
@@ -607,21 +683,38 @@ export class BunTestController implements vscode.Disposable {
resolve();
};
const handleCancel = () => {
clearTimeout(timeout);
this.signal!.off("Signal.Socket.connect", handleConnect);
reject(new Error("Test run cancelled"));
};
token.onCancellationRequested(handleCancel);
this.signal!.once("Signal.Socket.connect", handleConnect);
});
const { bunCommand, testArgs } = this.getBunExecutionConfig();
let args = [...testArgs, ...Array.from(allFiles)];
let args = [...testArgs, ...allFiles];
let printedArgs = `\x1b[34;1m>\x1b[0m \x1b[34;1m${bunCommand} ${testArgs.join(" ")}\x1b[2m`;
for (const file of allFiles) {
const f = path.relative(this.workspaceFolder.uri.fsPath, file) || file;
if (f.includes(" ")) {
printedArgs += ` ".${path.sep}${f}"`;
} else {
printedArgs += ` .${path.sep}${f}`;
}
}
if (isIndividualTestRun) {
const pattern = this.buildTestNamePattern(tests);
if (pattern) {
args.push("--test-name-pattern", process.platform === "win32" ? `"${pattern}"` : pattern);
args.push("--test-name-pattern", pattern);
printedArgs += `\x1b[0m\x1b[2m --test-name-pattern "${pattern}"\x1b[0m`;
}
}
run.appendOutput(`\r\n\x1b[34m>\x1b[0m \x1b[2m${bunCommand} ${args.join(" ")}\x1b[0m\r\n\r\n`);
args.push(`--inspect-wait=${this.signal!.url}`);
run.appendOutput(printedArgs + "\x1b[0m\r\n\r\n");
for (const test of tests) {
if (isIndividualTestRun || tests.length === 1) {
@@ -631,34 +724,52 @@ export class BunTestController implements vscode.Disposable {
}
}
let inspectorUrl: string | undefined =
this.signal.url.startsWith("ws") || this.signal.url.startsWith("tcp")
? `${this.signal!.url}?wait=1`
: `${this.signal!.url}`;
// right now there isnt a way to tell socket method to wait for the connection
if (!inspectorUrl?.includes("?wait=1")) {
args.push(`--inspect-wait=${this.signal!.url}`);
inspectorUrl = undefined;
}
const proc = spawn(bunCommand, args, {
cwd: this.workspaceFolder.uri.fsPath,
env: {
...process.env,
BUN_DEBUG_QUIET_LOGS: "1",
FORCE_COLOR: "1",
NO_COLOR: "0",
BUN_INSPECT: inspectorUrl,
...process.env,
},
});
this.activeProcesses.add(proc);
let stdout = "";
proc.on("exit", (code, signal) => {
debug.appendLine(`Process exited with code ${code}, signal ${signal}`);
if (code !== 0 && code !== 1) {
debug.appendLine(`Test process failed: exit ${code}, signal ${signal}`);
}
});
proc.on("error", error => {
stdout += `Process error: ${error.message}\n`;
debug.appendLine(`Process error: ${error.message}`);
});
proc.stdout?.on("data", data => {
const dataStr = data.toString();
stdout += dataStr;
const formattedOutput = dataStr.replace(/\n/g, "\r\n");
run.appendOutput(formattedOutput);
});
proc.stderr?.on("data", data => {
const dataStr = data.toString();
stdout += dataStr;
const formattedOutput = dataStr.replace(/\n/g, "\r\n");
run.appendOutput(formattedOutput);
});
@@ -666,35 +777,57 @@ export class BunTestController implements vscode.Disposable {
try {
await socketPromise;
} catch (error) {
debug.appendLine(`Failed to establish inspector connection: ${error}`);
debug.appendLine(`Signal URL was: ${this.signal!.url}`);
debug.appendLine(`Command was: ${bunCommand} ${args.join(" ")}`);
debug.appendLine(`Connection failed: ${error} (URL: ${this.signal!.url})`);
throw error;
}
await new Promise<void>((resolve, reject) => {
proc.on("close", code => {
const handleClose = (code: number | null) => {
this.activeProcesses.delete(proc);
if (code === 0 || code === 1) {
resolve();
} else {
reject(new Error(`Process exited with code ${code}`));
reject(new Error(`Process exited with code ${code}. Please check the console for more details.`));
}
});
};
proc.on("error", error => {
const handleError = (error: Error) => {
this.activeProcesses.delete(proc);
reject(error);
});
};
const handleCancel = () => {
proc.kill("SIGTERM");
this.activeProcesses.delete(proc);
reject(new Error("Test run cancelled"));
};
proc.on("close", handleClose);
proc.on("error", handleError);
token.onCancellationRequested(handleCancel);
}).finally(() => {
if (isIndividualTestRun) {
this.applyPreviousResults(tests, run);
if (this.discoveredTestIds.size === 0) {
const errorMsg =
"No tests were executed. This could mean:\r\n- All tests were filtered out\r\n- The test runner crashed before running tests\r\n- No tests match the pattern";
run.appendOutput(`\n\x1b[31m\x1b[1mError:\x1b[0m\x1b[31m ${errorMsg}\x1b[0m\n`);
for (const test of tests) {
if (!this.testResultHistory.has(test.id)) {
const msg = new vscode.TestMessage(errorMsg + "\n\n----------\n" + stdout + "\n----------\n");
msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
run.errored(test, msg);
}
}
}
if (isIndividualTestRun) {
this.cleanupUndiscoveredTests(tests);
} else {
this.cleanupStaleTests(tests);
if (this.discoveredTestIds.size > 0 && this.executedTestCount > 0) {
if (isIndividualTestRun) {
this.applyPreviousResults(tests, run);
this.cleanupUndiscoveredTests(tests);
} else {
this.cleanupStaleTests(tests);
}
}
if (this.activeProcesses.has(proc)) {
@@ -704,6 +837,7 @@ export class BunTestController implements vscode.Disposable {
this.disconnectInspector();
this.currentRun = null;
debug.appendLine(`Test run completed in ${performance.now() - time}ms`);
});
}
@@ -725,7 +859,7 @@ export class BunTestController implements vscode.Disposable {
run.passed(item, previousResult.duration);
break;
case "failed":
run.failed(item, previousResult.message || new vscode.TestMessage("Test failed"), previousResult.duration);
run.failed(item, [], previousResult.duration);
break;
case "skipped":
run.skipped(item);
@@ -763,16 +897,11 @@ export class BunTestController implements vscode.Disposable {
this.handleLifecycleError(event, run);
});
this.debugAdapter.on("Inspector.event", e => {
debug.appendLine(`Received inspector event: ${e.method}`);
});
this.debugAdapter.on("Inspector.error", e => {
debug.appendLine(`Inspector error: ${e}`);
});
socket.on("close", () => {
debug.appendLine("Inspector connection closed");
this.debugAdapter = null;
});
@@ -799,7 +928,6 @@ export class BunTestController implements vscode.Disposable {
const { id: inspectorTestId, url: sourceURL, name, type, parentId, line } = params;
if (!sourceURL) {
debug.appendLine(`Warning: Test found without URL: ${name}`);
return;
}
@@ -814,8 +942,6 @@ export class BunTestController implements vscode.Disposable {
this.inspectorToVSCode.set(inspectorTestId, testItem);
this.vscodeToInspector.set(testItem.id, inspectorTestId);
this.discoveredTestIds.add(testItem.id);
} else {
debug.appendLine(`Could not find VS Code test item for: ${name} in ${path.basename(filePath)}`);
}
}
@@ -931,6 +1057,7 @@ export class BunTestController implements vscode.Disposable {
if (!testItem) return;
const duration = elapsed / 1000000;
this.executedTestCount++;
if (
this.currentRunType === "individual" &&
@@ -959,7 +1086,6 @@ export class BunTestController implements vscode.Disposable {
break;
case "skip":
case "todo":
case "skipped_because_label":
run.skipped(testItem);
this.testResultHistory.set(testItem.id, { status: "skipped" });
break;
@@ -970,6 +1096,8 @@ export class BunTestController implements vscode.Disposable {
run.failed(testItem, timeoutMsg, duration);
this.testResultHistory.set(testItem.id, { status: "failed", message: timeoutMsg, duration });
break;
case "skipped_because_label":
break;
}
}
@@ -1078,7 +1206,10 @@ export class BunTestController implements vscode.Disposable {
const lines = messageLinesRaw;
const errorLine = lines[0].trim();
const messageLines = lines.slice(1).join("\n");
const messageLines = lines
.slice(1)
.filter(line => line.trim())
.join("\n");
const errorType = errorLine.replace(/^(E|e)rror: /, "").trim();
@@ -1090,8 +1221,8 @@ export class BunTestController implements vscode.Disposable {
const regex = /^Expected:\s*([\s\S]*?)\nReceived:\s*([\s\S]*?)$/;
let testMessage = vscode.TestMessage.diff(
errorLine,
messageLines.match(regex)?.[1].trim() || "",
messageLines.match(regex)?.[2].trim() || "",
messageLines.trim().match(regex)?.[1].trim() || "",
messageLines.trim().match(regex)?.[2].trim() || "",
);
if (!messageLines.match(regex)) {
const code = messageLines
@@ -1153,7 +1284,7 @@ export class BunTestController implements vscode.Disposable {
lastEffortMsg = lastEffortMsg.reverse();
}
const msg = errorLine.startsWith("error: expect")
const msg = errorType.startsWith("expect")
? `${lastEffortMsg.join("\n")}\n${errorLine.trim()}`.trim()
: `${errorLine.trim()}\n${messageLines}`.trim();
@@ -1201,12 +1332,15 @@ export class BunTestController implements vscode.Disposable {
t = t.replaceAll(/\$\{[^}]+\}/g, ".*?");
t = t.replaceAll(/\\\$\\\{[^}]+\\\}/g, ".*?");
t = t.replaceAll(/\\%[isfd]/g, ".*?");
t = t.replaceAll(/\\%[isfdojp#%]|(\\%)|(\\#)/g, ".*?");
t = t.replaceAll(/\$[\w\.\[\]]+/g, ".*?");
if (test.tags.some(tag => tag.id === "test" || tag.id === "it")) {
if (test?.tags?.some(tag => tag.id === "test" || tag.id === "it")) {
testNames.push(`^ ${t}$`);
} else {
} else if (test?.tags?.some(tag => tag.id === "describe")) {
testNames.push(`^ ${t} `);
} else {
testNames.push(t);
}
}
@@ -1242,7 +1376,13 @@ export class BunTestController implements vscode.Disposable {
const isIndividualTestRun = this.shouldUseTestNamePattern(tests);
if (testFiles.size === 0) {
run.appendOutput("No test files found to debug.\n");
const errorMsg = "No test files found to debug.";
run.appendOutput(`\x1b[31mError: ${errorMsg}\x1b[0m\n`);
for (const test of tests) {
const msg = new vscode.TestMessage(errorMsg);
msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
run.errored(test, msg);
}
run.end();
return;
}
@@ -1268,7 +1408,7 @@ export class BunTestController implements vscode.Disposable {
const pattern = this.buildTestNamePattern(tests);
if (pattern) {
args.push("--test-name-pattern", process.platform === "win32" ? `"${pattern}"` : pattern);
args.push("--test-name-pattern", pattern);
}
}
@@ -1289,9 +1429,12 @@ export class BunTestController implements vscode.Disposable {
if (!res) throw new Error("Failed to start debugging session");
} catch (error) {
for (const test of tests) {
run.errored(test, new vscode.TestMessage(`Error starting debugger: ${error}`));
const msg = new vscode.TestMessage(`Error starting debugger: ${error}`);
msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
run.errored(test, msg);
}
}
run.appendOutput("\n\x1b[33mDebug session started. Please open the debug console to see its output.\x1b[0m\r\n");
run.end();
}
@@ -1318,6 +1461,32 @@ export class BunTestController implements vscode.Disposable {
}
this.disposables = [];
}
// a sus way to expose internal functions to the test suite
public get _internal() {
return {
expandEachTests: this.expandEachTests.bind(this),
parseTestBlocks: this.parseTestBlocks.bind(this),
getBraceDepth: this.getBraceDepth.bind(this),
buildTestNamePattern: this.buildTestNamePattern.bind(this),
stripAnsi: this.stripAnsi.bind(this),
processErrorData: this.processErrorData.bind(this),
escapeTestName: this.escapeTestName.bind(this),
shouldUseTestNamePattern: this.shouldUseTestNamePattern.bind(this),
isTestFile: this.isTestFile.bind(this),
customFilePattern: this.customFilePattern.bind(this),
getBunExecutionConfig: this.getBunExecutionConfig.bind(this),
findTestByPath: this.findTestByPath.bind(this),
findTestByName: this.findTestByName.bind(this),
createTestItem: this.createTestItem.bind(this),
createErrorMessage: this.createErrorMessage.bind(this),
cleanupTestItem: this.cleanupTestItem.bind(this),
};
}
}
function windowsVscodeUri(uri: string): string {

View File

@@ -7,8 +7,14 @@ export async function registerTests(context: vscode.ExtensionContext) {
return;
}
const config = vscode.workspace.getConfiguration("bun.test");
const enable = config.get<boolean>("enable", true);
if (!enable) {
return;
}
try {
const controller = vscode.tests.createTestController("bun-tests", "Bun Tests");
const controller = vscode.tests.createTestController("bun", "Bun Tests");
context.subscriptions.push(controller);
const bunTestController = new BunTestController(controller, workspaceFolder);

View File

@@ -2,8 +2,8 @@
+++ CMakeLists.txt
@@ -1,5 +1,5 @@
#
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12 FATAL_ERROR)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12...3.5 FATAL_ERROR)
if(POLICY CMP0065)
cmake_policy(SET CMP0065 NEW) #3.4 don't use `-rdynamic` with executables
endif()
-cmake_minimum_required(VERSION 3.17 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.17...3.30 FATAL_ERROR)
PROJECT(libarchive C)
#

View File

@@ -1,22 +1,29 @@
--- a/libarchive/archive_write_add_filter_gzip.c
+++ b/libarchive/archive_write_add_filter_gzip.c
@@ -58,6 +58,7 @@ archive_write_set_compression_gzip(struct archive *a)
struct private_data {
--- a/libarchive/archive_write_add_filter_gzip.c 2025-07-21 06:29:58.505101515 +0000
+++ b/libarchive/archive_write_add_filter_gzip.c 2025-07-21 06:44:09.023676935 +0000
@@ -59,12 +59,13 @@
int compression_level;
int timestamp;
+ unsigned char os;
char *original_filename;
+ unsigned char os;
#ifdef HAVE_ZLIB_H
z_stream stream;
int64_t total_in;
@@ -106,6 +107,7 @@ archive_write_add_filter_gzip(struct archive *_a)
archive_set_error(&a->archive, ENOMEM, "Out of memory");
unsigned char *compressed;
size_t compressed_buffer_size;
- unsigned long crc;
+ uint32_t crc;
#else
struct archive_write_program_data *pdata;
#endif
@@ -108,6 +109,7 @@
return (ARCHIVE_FATAL);
}
+ data->os = 3; /* default Unix */
f->data = data;
+ data->os = 3; /* default Unix */
f->open = &archive_compressor_gzip_open;
f->options = &archive_compressor_gzip_options;
@@ -166,6 +168,30 @@ archive_compressor_gzip_options(struct archive_write_filter *f, const char *key,
f->close = &archive_compressor_gzip_close;
@@ -177,6 +179,30 @@
return (ARCHIVE_OK);
}
@@ -47,7 +54,7 @@
/* Note: The "warn" return is just to inform the options
* supervisor that we didn't handle it. It will generate
* a suitable error if no one used this option. */
@@ -226,7 +252,7 @@ archive_compressor_gzip_open(struct archive_write_filter *f)
@@ -236,7 +262,7 @@
data->compressed[8] = 4;
else
data->compressed[8] = 0;

View File

@@ -20,7 +20,7 @@ async function globSources(output, patterns, excludes = []) {
const sources =
paths
.map(path => normalize(relative(root, path)))
.map(path => normalize(relative(root, path).replaceAll("\\", "/")))
.sort((a, b) => a.localeCompare(b))
.join("\n")
.trim() + "\n";

109
scripts/p-limit.mjs Normal file
View File

@@ -0,0 +1,109 @@
/**
* p-limit@6.2.0
* https://github.com/sindresorhus/p-limit
* MIT (c) Sindre Sorhus
*/
import Queue from "./yocto-queue.mjs";
/**
 * Create a limiter that runs at most `concurrency` promise-returning
 * functions concurrently; extra calls wait in a FIFO queue.
 *
 * The returned `generator(function_, ...arguments_)` resolves/rejects with
 * the promise of `function_(...arguments_)`, which is started only when a
 * slot is free. The generator also exposes `activeCount`, `pendingCount`,
 * `clearQueue()`, and a writable `concurrency` property.
 *
 * NOTE: the scheduling below is timing-sensitive (see the microtask comment
 * in `enqueue`) — do not reorder statements here.
 */
export default function pLimit(concurrency) {
    validateConcurrency(concurrency);

    const queue = new Queue();
    let activeCount = 0;

    // Start the next queued task, if any, when a slot is available.
    const resumeNext = () => {
        if (activeCount < concurrency && queue.size > 0) {
            queue.dequeue()();
            // Since `pendingCount` has been decreased by one, increase `activeCount` by one.
            activeCount++;
        }
    };

    // Called when a task settles: release its slot and try to start another.
    const next = () => {
        activeCount--;
        resumeNext();
    };

    const run = async (function_, resolve, arguments_) => {
        const result = (async () => function_(...arguments_))();
        // Hand the (possibly still pending) promise to the caller immediately.
        resolve(result);
        try {
            await result;
        } catch {}
        // Errors are swallowed here only to avoid an unhandled rejection;
        // the caller still observes them through `result`.
        next();
    };

    const enqueue = (function_, resolve, arguments_) => {
        // Queue `internalResolve` instead of the `run` function
        // to preserve asynchronous context.
        new Promise(internalResolve => {
            queue.enqueue(internalResolve);
        }).then(run.bind(undefined, function_, resolve, arguments_));

        (async () => {
            // This function needs to wait until the next microtask before comparing
            // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
            // after the `internalResolve` function is dequeued and called. The comparison in the if-statement
            // needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
            await Promise.resolve();

            if (activeCount < concurrency) {
                resumeNext();
            }
        })();
    };

    // Public entry point: enqueue the call and return its eventual result.
    const generator = (function_, ...arguments_) =>
        new Promise(resolve => {
            enqueue(function_, resolve, arguments_);
        });

    Object.defineProperties(generator, {
        activeCount: {
            get: () => activeCount,
        },
        pendingCount: {
            get: () => queue.size,
        },
        clearQueue: {
            // Drops queued (not yet started) tasks; running tasks are unaffected.
            value() {
                queue.clear();
            },
        },
        concurrency: {
            get: () => concurrency,
            set(newConcurrency) {
                validateConcurrency(newConcurrency);
                concurrency = newConcurrency;

                // If the limit was raised, start as many queued tasks as now fit.
                queueMicrotask(() => {
                    // eslint-disable-next-line no-unmodified-loop-condition
                    while (activeCount < concurrency && queue.size > 0) {
                        resumeNext();
                    }
                });
            },
        },
    });

    return generator;
}
/**
 * Wrap `function_` so that every invocation of the returned function is
 * routed through its own concurrency limiter.
 *
 * @param {Function} function_ - Promise-returning (or plain) function to wrap.
 * @param {{concurrency: number}} option - Maximum number of concurrent runs.
 * @returns {Function} A function with the same arguments as `function_`,
 *   returning a promise for its result.
 */
export function limitFunction(function_, option) {
    const limiter = pLimit(option.concurrency);
    return (...arguments_) => limiter(() => function_(...arguments_));
}
/**
 * Ensure `concurrency` is a positive integer or positive infinity
 * (infinity means "unbounded"). Throws a TypeError otherwise.
 */
function validateConcurrency(concurrency) {
    const isValidNumber = Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY;
    if (!isValidNumber || concurrency <= 0) {
        throw new TypeError("Expected `concurrency` to be a number from 1 and up");
    }
}

View File

@@ -28,9 +28,10 @@ import {
writeFileSync,
} from "node:fs";
import { readFile } from "node:fs/promises";
import { userInfo } from "node:os";
import { availableParallelism, userInfo } from "node:os";
import { basename, dirname, extname, join, relative, sep } from "node:path";
import { parseArgs } from "node:util";
import pLimit from "./p-limit.mjs";
import {
getAbi,
getAbiVersion,
@@ -63,6 +64,7 @@ import {
unzip,
uploadArtifact,
} from "./utils.mjs";
let isQuiet = false;
const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
const testsPath = join(cwd, "test");
@@ -153,6 +155,10 @@ const { values: options, positionals: filters } = parseArgs({
type: "boolean",
default: isBuildkite && isLinux,
},
["parallel"]: {
type: "boolean",
default: false,
},
},
});
@@ -341,6 +347,10 @@ async function runTests() {
const failedResults = [];
const maxAttempts = 1 + (parseInt(options["retries"]) || 0);
const parallelism = options["parallel"] ? availableParallelism() : 1;
console.log("parallelism", parallelism);
const limit = pLimit(parallelism);
/**
* @param {string} title
* @param {function} fn
@@ -355,12 +365,15 @@ async function runTests() {
await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
}
result = await startGroup(
attempt === 1
? `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`
: `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title} ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`,
fn,
);
let grouptitle = `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`;
if (attempt > 1) grouptitle += ` ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`;
if (parallelism > 1) {
console.log(grouptitle);
result = await fn();
} else {
result = await startGroup(grouptitle, fn);
}
const { ok, stdoutPreview, error } = result;
if (ok) {
@@ -375,6 +388,7 @@ async function runTests() {
const color = attempt >= maxAttempts ? "red" : "yellow";
const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
startGroup(label, () => {
if (parallelism > 1) return;
process.stderr.write(stdoutPreview);
});
@@ -434,48 +448,62 @@ async function runTests() {
}
if (!failedResults.length) {
for (const testPath of tests) {
const absoluteTestPath = join(testsPath, testPath);
const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
if (isNodeTest(testPath)) {
const testContent = readFileSync(absoluteTestPath, "utf-8");
const runWithBunTest =
title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
const subcommand = runWithBunTest ? "test" : "run";
const env = {
FORCE_COLOR: "0",
NO_COLOR: "1",
BUN_DEBUG_QUIET_LOGS: "1",
};
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
env.BUN_JSC_validateExceptionChecks = "1";
}
await runTest(title, async () => {
const { ok, error, stdout } = await spawnBun(execPath, {
cwd: cwd,
args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
timeout: getNodeParallelTestTimeout(title),
env,
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
});
const mb = 1024 ** 3;
const stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
return {
testPath: title,
ok: ok,
status: ok ? "pass" : "fail",
error: error,
errors: [],
tests: [],
stdout: stdout,
stdoutPreview: stdoutPreview,
};
});
} else {
await runTest(title, async () => spawnBunTest(execPath, join("test", testPath)));
}
}
await Promise.all(
tests.map(testPath =>
limit(() => {
const absoluteTestPath = join(testsPath, testPath);
const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
if (isNodeTest(testPath)) {
const testContent = readFileSync(absoluteTestPath, "utf-8");
const runWithBunTest =
title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
const subcommand = runWithBunTest ? "test" : "run";
const env = {
FORCE_COLOR: "0",
NO_COLOR: "1",
BUN_DEBUG_QUIET_LOGS: "1",
};
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
env.BUN_JSC_validateExceptionChecks = "1";
}
return runTest(title, async () => {
const { ok, error, stdout } = await spawnBun(execPath, {
cwd: cwd,
args: [
subcommand,
"--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"),
absoluteTestPath,
],
timeout: getNodeParallelTestTimeout(title),
env,
stdout: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stdout, chunk),
stderr: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stderr, chunk),
});
const mb = 1024 ** 3;
const stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
return {
testPath: title,
ok: ok,
status: ok ? "pass" : "fail",
error: error,
errors: [],
tests: [],
stdout: stdout,
stdoutPreview: stdoutPreview,
};
});
} else {
return runTest(title, async () =>
spawnBunTest(execPath, join("test", testPath), {
cwd,
stdout: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stdout, chunk),
stderr: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stderr, chunk),
}),
);
}
}),
),
);
}
if (vendorTests?.length) {
@@ -1059,7 +1087,7 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
const env = {
GITHUB_ACTIONS: "true", // always true so annotations are parsed
};
if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
env.BUN_JSC_validateExceptionChecks = "1";
}
@@ -1068,8 +1096,8 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
cwd: options["cwd"],
timeout: isReallyTest ? timeout : 30_000,
env,
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
stdout: options.stdout,
stderr: options.stderr,
});
const { tests, errors, stdout: stdoutPreview } = parseTestStdout(stdout, testPath);

70
scripts/sortImports.ts → scripts/sort-imports.ts Normal file → Executable file
View File

@@ -1,3 +1,4 @@
#!/usr/bin/env bun
import { readdirSync } from "fs";
import path from "path";
@@ -16,10 +17,9 @@ const usage = String.raw`
Usage: bun scripts/sortImports [options] <files...>
Options:
--help Show this help message
--no-include-pub Exclude pub imports from sorting
--no-remove-unused Don't remove unused imports
--include-unsorted Process files even if they don't have @sortImports marker
--help Show this help message
--include-pub Also sort ${"`pub`"} imports
--keep-unused Don't remove unused imports
Examples:
bun scripts/sortImports src
@@ -34,9 +34,9 @@ if (filePaths.length === 0) {
}
const config = {
includePub: !args.includes("--no-include-pub"),
removeUnused: !args.includes("--no-remove-unused"),
includeUnsorted: args.includes("--include-unsorted"),
includePub: args.includes("--include-pub"),
removeUnused: !args.includes("--keep-unused"),
normalizePaths: "./",
};
// Type definitions
@@ -68,11 +68,11 @@ function parseDeclarations(
const line = lines[i];
if (line === "// @sortImports") {
lines[i] = "";
lines[i] = DELETED_LINE;
continue;
}
const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/;
const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);(\s*\/\/[^\n]*)?$/;
const match = line.match(inlineDeclPattern);
if (!match) continue;
@@ -275,8 +275,6 @@ function sortGroupsAndDeclarations(groups: Map<string, Group>): string[] {
// Generate the sorted output
function generateSortedOutput(lines: string[], groups: Map<string, Group>, sortedGroupKeys: string[]): string[] {
const outputLines = [...lines];
outputLines.push("");
outputLines.push("// @sortImports");
for (const groupKey of sortedGroupKeys) {
const groupDeclarations = groups.get(groupKey)!;
@@ -288,22 +286,36 @@ function generateSortedOutput(lines: string[], groups: Map<string, Group>, sorte
// Add declarations to output and mark original lines for removal
for (const declaration of groupDeclarations.declarations) {
outputLines.push(declaration.whole);
outputLines[declaration.index] = "";
outputLines[declaration.index] = DELETED_LINE;
}
}
return outputLines;
}
/**
 * Finds the first declaration whose value is `@This()` (e.g. `const Foo = @This();`),
 * removes it from the map so it is not grouped with ordinary imports, and returns it.
 * Returns `null` when no such declaration exists.
 */
function extractThisDeclaration(declarations: Map<string, Declaration>): Declaration | null {
  let found: Declaration | null = null;
  for (const candidate of declarations.values()) {
    if (candidate.value === "@This()") {
      found = candidate;
      break;
    }
  }
  if (found !== null) {
    declarations.delete(found.key);
  }
  return found;
}
/** Sentinel written into a line slot to mark it for removal before the final join. */
const DELETED_LINE = "%DELETED_LINE%";
// Main execution function for a single file
async function processFile(filePath: string): Promise<void> {
const originalFileContents = await Bun.file(filePath).text();
let fileContents = originalFileContents;
if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) {
return;
if (config.normalizePaths === "") {
fileContents = fileContents.replaceAll(`@import("./`, `@import("`);
} else if (config.normalizePaths === "./") {
fileContents = fileContents.replaceAll(/@import\("([A-Za-z0-9_-][^"]*\.zig)"\)/g, '@import("./$1")');
fileContents = fileContents.replaceAll(`@import("./../`, `@import("../`);
}
console.log(`Processing: ${filePath}`);
let needsRecurse = true;
while (needsRecurse) {
@@ -312,6 +324,7 @@ async function processFile(filePath: string): Promise<void> {
const lines = fileContents.split("\n");
const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents);
const thisDeclaration = extractThisDeclaration(declarations);
const groups = groupDeclarationsByImportPath(declarations);
promoteItemsWithChildGroups(groups);
@@ -323,13 +336,33 @@ async function processFile(filePath: string): Promise<void> {
// Remove unused declarations
if (config.removeUnused) {
for (const line of unusedLineIndices) {
sortedLines[line] = "";
sortedLines[line] = DELETED_LINE;
needsRecurse = true;
}
}
if (thisDeclaration) {
sortedLines[thisDeclaration.index] = DELETED_LINE;
}
if (thisDeclaration) {
let firstNonFileCommentLine = 0;
for (const line of sortedLines) {
if (line.startsWith("//!")) {
firstNonFileCommentLine++;
} else {
break;
}
}
const insert = [thisDeclaration.whole, ""];
if (firstNonFileCommentLine > 0) insert.unshift("");
sortedLines.splice(firstNonFileCommentLine, 0, ...insert);
}
fileContents = sortedLines.join("\n");
}
// Remove deleted lines
fileContents = fileContents.replaceAll(DELETED_LINE + "\n", "");
// fileContents = fileContents.replaceAll(DELETED_LINE, ""); // any remaining lines
// Remove any leading newlines
fileContents = fileContents.replace(/^\n+/, "");
@@ -343,7 +376,6 @@ async function processFile(filePath: string): Promise<void> {
if (fileContents === "\n") fileContents = "";
if (fileContents === originalFileContents) {
console.log(`✓ No changes: ${filePath}`);
return;
}
@@ -369,7 +401,7 @@ async function main() {
successCount++;
} catch (error) {
errorCount++;
console.error(`Failed to process ${filePath}`);
console.error(`Failed to process ${path.join(filePath, file)}:\n`, error);
}
}
continue;
@@ -380,7 +412,7 @@ async function main() {
successCount++;
} catch (error) {
errorCount++;
console.error(`Failed to process ${filePath}`);
console.error(`Failed to process ${filePath}:\n`, error);
}
}

View File

@@ -2850,6 +2850,14 @@ export function printEnvironment() {
}
});
}
if (isLinux) {
startGroup("Memory", () => {
const shell = which(["sh", "bash"]);
if (shell) {
spawnSync([shell, "-c", "free -m -w"], { stdio: "inherit" });
}
});
}
if (isWindows) {
startGroup("Disk (win)", () => {
const shell = which(["pwsh"]);
@@ -2857,6 +2865,14 @@ export function printEnvironment() {
spawnSync([shell, "-c", "get-psdrive"], { stdio: "inherit" });
}
});
startGroup("Memory", () => {
const shell = which(["pwsh"]);
if (shell) {
spawnSync([shell, "-c", "Get-Counter '\\Memory\\Available MBytes'"], { stdio: "inherit" });
console.log();
spawnSync([shell, "-c", "Get-CimInstance Win32_PhysicalMemory"], { stdio: "inherit" });
}
});
}
}

90
scripts/yocto-queue.mjs Normal file
View File

@@ -0,0 +1,90 @@
/**
* yocto-queue@1.2.1
* https://github.com/sindresorhus/yocto-queue
* MIT (c) Sindre Sorhus
*/
/*
How it works:
`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.
*/
/**
 * One cell of the singly linked list backing `Queue`: holds a queued value
 * plus a link to the cell enqueued after it.
 */
class Node {
	value;
	next;

	constructor(value) {
		this.value = value;
	}
}

/**
 * FIFO queue backed by a singly linked list. A tail reference is kept so
 * `enqueue()` is O(1) instead of walking the chain from the head each time.
 */
export default class Queue {
	#head;
	#tail;
	#size;

	constructor() {
		this.clear();
	}

	/** Append a value at the back of the queue. */
	enqueue(value) {
		const node = new Node(value);

		if (!this.#head) {
			// Empty queue: the new node becomes both ends of the chain.
			this.#head = node;
		} else {
			this.#tail.next = node;
		}

		this.#tail = node;
		this.#size++;
	}

	/** Remove and return the front value, or `undefined` when the queue is empty. */
	dequeue() {
		const front = this.#head;
		if (!front) {
			return;
		}

		this.#head = front.next;
		this.#size--;
		return front.value;
	}

	/** Return the front value without removing it, or `undefined` when empty. */
	peek() {
		return this.#head ? this.#head.value : undefined;
	}

	/** Drop every queued value. */
	clear() {
		this.#head = undefined;
		this.#tail = undefined;
		this.#size = 0;
	}

	/** Number of values currently queued. */
	get size() {
		return this.#size;
	}

	/** Iterate front-to-back without consuming the queue. */
	*[Symbol.iterator]() {
		for (let node = this.#head; node; node = node.next) {
			yield node.value;
		}
	}

	/** Iterate front-to-back, dequeuing each value as it is yielded. */
	*drain() {
		while (this.#head) {
			yield this.dequeue();
		}
	}
}

View File

@@ -1,155 +0,0 @@
import * as fs from "fs";
import * as path from "path";
/**
 * Strips top-level `const NAME = ...;` declarations that are never referenced
 * elsewhere in a Zig source file. Runs to a fixed point, since removing one
 * declaration can make another one unreferenced.
 *
 * Declarations whose right-hand side contains `{` (likely struct/object
 * literals) are left untouched.
 */
export function removeUnreferencedImports(content: string): string {
  let current = content;

  for (;;) {
    const lines = current.split("\n");
    const kept: string[] = [];
    let removedAny = false;

    lines.forEach((line, index) => {
      // Top-level const declaration: const <IDENTIFIER> = ...
      const match = line.match(/^const\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=(.*)$/);
      if (match && !match[2].includes("{") && !isIdentifierReferenced(match[1], lines, index)) {
        console.log(`Removing unreferenced import: ${match[1]}`);
        removedAny = true;
        return; // drop this line from the output
      }
      kept.push(line);
    });

    current = kept.join("\n");
    if (!removedAny) {
      break;
    }
  }

  return current;
}

/**
 * Whether `identifier` appears as a whole word on any line other than its own
 * declaration line. Whole-word matching prevents e.g. "std" matching "stdx".
 */
function isIdentifierReferenced(identifier: string, lines: string[], declarationLineIndex: number): boolean {
  const wordPattern = new RegExp(`\\b${escapeRegex(identifier)}\\b`);
  return lines.some((line, index) => index !== declarationLineIndex && wordPattern.test(line));
}

/** Escape characters that have special meaning inside a regular expression. */
function escapeRegex(string: string): string {
  return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * Runs the unreferenced-import cleanup on one Zig file, rewriting it in place
 * only when the cleanup actually changed the contents. Errors are reported and
 * swallowed so a single bad file does not abort a batch run.
 */
export function processZigFile(filePath: string): void {
  try {
    const original = fs.readFileSync(filePath, "utf-8");
    const cleaned = removeUnreferencedImports(original);
    if (cleaned === original) {
      console.log(`No changes: ${filePath}`);
    } else {
      fs.writeFileSync(filePath, cleaned);
      console.log(`Cleaned: ${filePath}`);
    }
  } catch (error) {
    console.error(`Error processing ${filePath}:`, error);
  }
}
/**
 * Dispatches each input path: directories are walked recursively, `.zig`
 * files are cleaned directly, and anything else is skipped with a warning.
 */
export function processFiles(paths: string[]): void {
  for (const inputPath of paths) {
    if (fs.statSync(inputPath).isDirectory()) {
      processDirectory(inputPath);
      continue;
    }
    if (!inputPath.endsWith(".zig")) {
      console.warn(`Skipping non-Zig file: ${inputPath}`);
      continue;
    }
    processZigFile(inputPath);
  }
}
/** Recursively visits `dirPath`, cleaning every `.zig` file found. */
function processDirectory(dirPath: string): void {
  for (const entry of fs.readdirSync(dirPath, { withFileTypes: true })) {
    const fullPath = path.join(dirPath, entry.name);
    if (entry.isDirectory()) {
      processDirectory(fullPath);
    } else if (entry.name.endsWith(".zig")) {
      processZigFile(fullPath);
    }
  }
}
// CLI entry point: run only when executed directly, not when imported.
if (require.main === module) {
  const args = process.argv.slice(2);
  if (args.length > 0) {
    processFiles(args);
  } else {
    // No inputs: print usage and exit with a failure status.
    console.log("Usage: bun zig-remove-unreferenced-top-level-decls.ts <file1.zig> [file2.zig] [directory]...");
    console.log("");
    console.log("Examples:");
    console.log(" bun zig-remove-unreferenced-top-level-decls.ts file.zig");
    console.log(" bun zig-remove-unreferenced-top-level-decls.ts src/");
    console.log(" bun zig-remove-unreferenced-top-level-decls.ts file1.zig file2.zig src/");
    process.exit(1);
  }
}

View File

@@ -1,12 +1,4 @@
const std = @import("std");
const Environment = @import("./env.zig");
const Output = @import("output.zig");
const use_mimalloc = bun.use_mimalloc;
const Mimalloc = bun.Mimalloc;
const bun = @import("bun");
const version_string = Environment.version_string;
const Global = @This();
/// Does not have the canary tag, because it is exposed in `Bun.version`
/// "1.0.0" or "1.0.0-debug"
@@ -191,9 +183,6 @@ pub fn crash() noreturn {
Global.exit(1);
}
const Global = @This();
const string = bun.string;
pub const BunInfo = struct {
bun_version: string,
platform: Analytics.GenerateHeader.GeneratePlatform.Platform,
@@ -231,3 +220,14 @@ pub export fn Bun__onExit() void {
comptime {
_ = Bun__onExit;
}
const Output = @import("./output.zig");
const std = @import("std");
const Environment = @import("./env.zig");
const version_string = Environment.version_string;
const bun = @import("bun");
const Mimalloc = bun.Mimalloc;
const string = bun.string;
const use_mimalloc = bun.use_mimalloc;

View File

@@ -1,10 +1,4 @@
const std = @import("std");
const bun = @import("bun");
const ImportRecord = @import("./import_record.zig").ImportRecord;
const ImportKind = @import("./import_record.zig").ImportKind;
const lol = @import("./deps/lol-html.zig");
const logger = bun.logger;
const fs = bun.fs;
const HTMLScanner = @This();
allocator: std.mem.Allocator,
import_records: ImportRecord.List = .{},
@@ -303,4 +297,12 @@ pub fn HTMLProcessor(
};
}
const HTMLScanner = @This();
const lol = @import("./deps/lol-html.zig");
const std = @import("std");
const ImportKind = @import("./import_record.zig").ImportKind;
const ImportRecord = @import("./import_record.zig").ImportRecord;
const bun = @import("bun");
const fs = bun.fs;
const logger = bun.logger;

View File

@@ -1,3 +1,5 @@
const OutputFile = @This();
// Instead of keeping files in-memory, we:
// 1. Write directly to disk
// 2. (Optional) move the file to the destination
@@ -20,8 +22,24 @@ side: ?bun.bake.Side,
/// This is only set for the JS bundle, and not files associated with an
/// entrypoint like sourcemaps and bytecode
entry_point_index: ?u32,
referenced_css_files: []const Index = &.{},
referenced_css_chunks: []const Index = &.{},
source_index: Index.Optional = .none,
bake_extra: BakeExtra = .{},
pub const zero_value = OutputFile{
.loader = .file,
.src_path = Fs.Path.init(""),
.value = .noop,
.output_kind = .chunk,
.side = null,
.entry_point_index = null,
};
pub const BakeExtra = struct {
is_route: bool = false,
fully_static: bool = false,
bake_is_runtime: bool = false,
};
pub const Index = bun.GenericIndex(u32, OutputFile);
@@ -30,7 +48,7 @@ pub fn deinit(this: *OutputFile) void {
bun.default_allocator.free(this.src_path.text);
bun.default_allocator.free(this.dest_path);
bun.default_allocator.free(this.referenced_css_files);
bun.default_allocator.free(this.referenced_css_chunks);
}
// Depending on:
@@ -99,6 +117,13 @@ pub const Value = union(Kind) {
}
}
pub fn asSlice(v: Value) []const u8 {
return switch (v) {
.buffer => |buf| buf.bytes,
else => "",
};
}
pub fn toBunString(v: Value) bun.String {
return switch (v) {
.noop => bun.String.empty,
@@ -206,7 +231,8 @@ pub const Options = struct {
},
side: ?bun.bake.Side,
entry_point_index: ?u32,
referenced_css_files: []const Index = &.{},
referenced_css_chunks: []const Index = &.{},
bake_extra: BakeExtra = .{},
};
pub fn init(options: Options) OutputFile {
@@ -240,7 +266,8 @@ pub fn init(options: Options) OutputFile {
},
.side = options.side,
.entry_point_index = options.entry_point_index,
.referenced_css_files = options.referenced_css_files,
.referenced_css_chunks = options.referenced_css_chunks,
.bake_extra = options.bake_extra,
};
}
@@ -489,16 +516,16 @@ pub fn toBlob(
};
}
const OutputFile = @This();
const string = []const u8;
const FileDescriptorType = bun.FileDescriptor;
const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;
const Fs = bun.fs;
const Loader = @import("./options.zig").Loader;
const resolver = @import("./resolver/resolver.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const resolver = @import("./resolver/resolver.zig");
const std = @import("std");
const Loader = @import("./options.zig").Loader;
const Output = @import("./Global.zig").Output;
const bun = @import("bun");
const Environment = bun.Environment;
const FileDescriptorType = bun.FileDescriptor;
const Fs = bun.fs;
const JSC = bun.JSC;

View File

@@ -14,12 +14,7 @@
//! * `refresh_rate_ms`
//! * `initial_delay_ms`
const std = @import("std");
const builtin = @import("builtin");
const windows = std.os.windows;
const assert = bun.assert;
const Progress = @This();
const bun = @import("bun");
/// `null` if the current node (and its children) should
/// not print on update()
@@ -453,3 +448,10 @@ test "basic functionality" {
node.end();
}
}
const builtin = @import("builtin");
const std = @import("std");
const windows = std.os.windows;
const bun = @import("bun");
const assert = bun.assert;

View File

@@ -1,20 +1,6 @@
//! Originally, we tried using LIEF to inject the module graph into a MachO segment
//! But this incurred a fixed 350ms overhead on every build, which is unacceptable
//! so we give up on codesigning support on macOS for now until we can find a better solution
const bun = @import("bun");
const std = @import("std");
const Schema = bun.Schema.Api;
const strings = bun.strings;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const Syscall = bun.sys;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;
const macho = bun.macho;
const pe = bun.pe;
const w = std.os.windows;
pub const StandaloneModuleGraph = struct {
bytes: []const u8 = "",
@@ -732,6 +718,10 @@ pub const StandaloneModuleGraph = struct {
cleanup(zname, cloned_executable_fd);
Global.exit(1);
};
// Set executable permissions when running on POSIX hosts, even for Windows targets
if (comptime !Environment.isWindows) {
_ = bun.c.fchmod(cloned_executable_fd.native(), 0o777);
}
return cloned_executable_fd;
},
else => {
@@ -1320,3 +1310,18 @@ pub const StandaloneModuleGraph = struct {
bun.assert(header_list.items.len == string_payload_start_location);
}
};
const std = @import("std");
const w = std.os.windows;
const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const Output = bun.Output;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;
const Syscall = bun.sys;
const macho = bun.macho;
const pe = bun.pe;
const strings = bun.strings;
const Schema = bun.Schema.Api;

View File

@@ -1,13 +1,5 @@
// https://github.com/lithdew/rheia/blob/162293d0f0e8d6572a8954c0add83f13f76b3cc6/hash_map.zig
// Apache License 2.0
const std = @import("std");
const mem = std.mem;
const math = std.math;
const testing = std.testing;
const bun = @import("bun");
const assert = bun.assert;
pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type {
return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage);
@@ -785,3 +777,11 @@ test "SortedHashMap: collision test" {
try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?);
try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?);
}
const bun = @import("bun");
const assert = bun.assert;
const std = @import("std");
const math = std.math;
const mem = std.mem;
const testing = std.testing;

View File

@@ -1,5 +1,7 @@
//! Bun's cross-platform filesystem watcher. Runs on its own thread.
const Watcher = @This();
const DebugLogScope = bun.Output.Scoped(.watcher, false);
const log = DebugLogScope.log;
@@ -126,7 +128,6 @@ pub fn getHash(filepath: string) HashType {
pub const WatchItemIndex = u16;
pub const max_eviction_count = 8096;
const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
// TODO: some platform-specific behavior is implemented in
// this file instead of the platform-specific file.
// ideally, the constants above can be inlined
@@ -673,13 +674,15 @@ pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: bun.Sto
}
}
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Environment = bun.Environment;
const strings = bun.strings;
const FeatureFlags = bun.FeatureFlags;
const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
const options = @import("./options.zig");
const Mutex = bun.Mutex;
const std = @import("std");
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
const bun = @import("bun");
const Environment = bun.Environment;
const FeatureFlags = bun.FeatureFlags;
const Mutex = bun.Mutex;
const Output = bun.Output;
const string = bun.string;
const strings = bun.strings;

View File

@@ -1,9 +1,3 @@
const std = @import("std");
const Environment = @import("./env.zig");
const bun = @import("bun");
const OOM = bun.OOM;
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
(@intFromPtr(slice.ptr) + slice.len * @sizeOf(T)) <= (@intFromPtr(buffer.ptr) + buffer.len * @sizeOf(T)));
@@ -290,8 +284,6 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
};
}
const Mutex = bun.Mutex;
/// Append-only list.
/// Stores an initial count in .bss section of the object file
/// Overflows to heap when count is exceeded.
@@ -769,3 +761,10 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
}
};
}
const Environment = @import("./env.zig");
const std = @import("std");
const bun = @import("bun");
const Mutex = bun.Mutex;
const OOM = bun.OOM;

View File

@@ -1,5 +1,6 @@
//! AllocationScope wraps another allocator, providing leak and invalid free assertions.
//! It also allows measuring how much memory a scope has allocated.
const AllocationScope = @This();
pub const enabled = bun.Environment.enableAllocScopes;
@@ -253,6 +254,7 @@ pub inline fn downcast(a: Allocator) ?*AllocationScope {
const std = @import("std");
const Allocator = std.mem.Allocator;
const bun = @import("bun");
const Output = bun.Output;
const StoredTrace = bun.crash_handler.StoredTrace;

View File

@@ -1,4 +1,5 @@
const MemoryReportingAllocator = @This();
const log = bun.Output.scoped(.MEM, false);
child_allocator: std.mem.Allocator,
@@ -84,7 +85,8 @@ pub const VTable = std.mem.Allocator.VTable{
};
const std = @import("std");
const bun = @import("bun");
const jsc = bun.jsc;
const Environment = bun.Environment;
const Output = bun.Output;
const jsc = bun.jsc;

View File

@@ -1,6 +1,4 @@
//! A nullable allocator the same size as `std.mem.Allocator`.
const std = @import("std");
const bun = @import("bun");
const NullableAllocator = @This();
@@ -46,3 +44,6 @@ comptime {
@compileError("Expected the sizes to be the same.");
}
}
const bun = @import("bun");
const std = @import("std");

View File

@@ -1,6 +1,3 @@
const bun = @import("bun");
const std = @import("std");
/// When cloning large amounts of data potentially multiple times, we can
/// leverage copy-on-write memory to avoid actually copying the data. To do that
/// on Linux, we need to use a memfd, which is a Linux-specific feature.
@@ -185,3 +182,6 @@ pub const LinuxMemFdAllocator = struct {
}
}
};
const bun = @import("bun");
const std = @import("std");

View File

@@ -1,6 +1,3 @@
const bun = @import("bun");
const std = @import("std");
/// Single allocation only.
///
pub const MaxHeapAllocator = struct {
@@ -51,3 +48,6 @@ pub const MaxHeapAllocator = struct {
};
}
};
const bun = @import("bun");
const std = @import("std");

View File

@@ -1,11 +1,4 @@
const mem = @import("std").mem;
const std = @import("std");
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);
const assert = bun.assert;
const Allocator = mem.Allocator;
const mimalloc = @import("./mimalloc.zig");
const Environment = @import("../env.zig");
fn mimalloc_free(
_: *anyopaque,
@@ -150,3 +143,13 @@ const z_allocator_vtable = Allocator.VTable{
.remap = &std.mem.Allocator.noRemap,
.free = &ZAllocator.free_with_z_allocator,
};
const Environment = @import("../env.zig");
const mimalloc = @import("./mimalloc.zig");
const std = @import("std");
const bun = @import("bun");
const assert = bun.assert;
const mem = @import("std").mem;
const Allocator = mem.Allocator;

View File

@@ -202,7 +202,6 @@ pub const MI_SMALL_WSIZE_MAX = @as(c_int, 128);
pub const MI_SMALL_SIZE_MAX = MI_SMALL_WSIZE_MAX * @import("std").zig.c_translation.sizeof(?*anyopaque);
pub const MI_ALIGNMENT_MAX = (@as(c_int, 16) * @as(c_int, 1024)) * @as(c_ulong, 1024);
const std = @import("std");
pub fn canUseAlignedAlloc(len: usize, alignment: usize) bool {
return alignment > 0 and std.math.isPowerOfTwo(alignment) and !mi_malloc_satisfies_alignment(alignment, len);
}
@@ -211,3 +210,5 @@ inline fn mi_malloc_satisfies_alignment(alignment: usize, size: usize) bool {
return (alignment == @sizeOf(*anyopaque) or
(alignment == MI_MAX_ALIGN_SIZE and size >= (MI_MAX_ALIGN_SIZE / 2)));
}
const std = @import("std");

View File

@@ -1,12 +1,3 @@
const mem = @import("std").mem;
const std = @import("std");
const mimalloc = @import("./mimalloc.zig");
const Environment = @import("../env.zig");
const FeatureFlags = @import("../feature_flags.zig");
const Allocator = mem.Allocator;
const assert = bun.assert;
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);
pub const Arena = struct {
@@ -167,3 +158,14 @@ const c_allocator_vtable = Allocator.VTable{
.remap = &Arena.remap,
.free = &Arena.free,
};
const Environment = @import("../env.zig");
const FeatureFlags = @import("../feature_flags.zig");
const mimalloc = @import("./mimalloc.zig");
const std = @import("std");
const bun = @import("bun");
const assert = bun.assert;
const mem = @import("std").mem;
const Allocator = mem.Allocator;

View File

@@ -1,5 +1,3 @@
const std = @import("std");
pub const Reader = struct {
const Self = @This();
pub const ReadError = error{EOF};
@@ -518,3 +516,5 @@ pub const analytics = struct {
}
};
};
const std = @import("std");

View File

@@ -1,11 +1,3 @@
const bun = @import("bun");
const Environment = bun.Environment;
const std = @import("std");
const Analytics = @import("./analytics_schema.zig").analytics;
const Semver = bun.Semver;
/// Enables analytics. This is used by:
/// - crash_handler.zig's `report` function to anonymously report crashes
///
@@ -375,3 +367,10 @@ pub const GenerateHeader = struct {
}
};
};
const std = @import("std");
const Analytics = @import("./analytics_schema.zig").analytics;
const bun = @import("bun");
const Environment = bun.Environment;
const Semver = bun.Semver;

View File

@@ -1,8 +1,3 @@
const std = @import("std");
const bun = @import("bun");
const js_ast = bun.JSAst;
const OOM = bun.OOM;
pub const Reader = struct {
const Self = @This();
pub const ReadError = error{EOF};
@@ -1956,6 +1951,27 @@ pub const Api = struct {
_,
pub fn fromJS(global: *bun.JSC.JSGlobalObject, value: bun.JSC.JSValue) bun.JSError!?SourceMapMode {
if (value.isString()) {
const str = try value.toSliceOrNull(global);
defer str.deinit();
const utf8 = str.slice();
if (bun.strings.eqlComptime(utf8, "none")) {
return .none;
}
if (bun.strings.eqlComptime(utf8, "inline")) {
return .@"inline";
}
if (bun.strings.eqlComptime(utf8, "external")) {
return .external;
}
if (bun.strings.eqlComptime(utf8, "linked")) {
return .linked;
}
}
return null;
}
pub fn jsonStringify(self: @This(), writer: anytype) !void {
return try writer.write(@tagName(self));
}
@@ -3349,3 +3365,9 @@ pub const Api = struct {
}
};
};
const std = @import("std");
const bun = @import("bun");
const OOM = bun.OOM;
const js_ast = bun.JSAst;

View File

@@ -85,8 +85,6 @@ pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) voi
this.bump_allocator = this.stack_allocator.get();
}
// @sortImports
const bun = @import("bun");
const std = @import("std");

View File

@@ -112,7 +112,7 @@ pub fn deinit(this: *Ast) void {
if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}
// @sortImports
pub const Class = G.Class;
const std = @import("std");
const Runtime = @import("../runtime.zig").Runtime;
@@ -128,6 +128,7 @@ const Ast = js_ast.Ast;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Expr = js_ast.Expr;
const G = js_ast.G;
const InlinedEnumValue = js_ast.InlinedEnumValue;
const LocRef = js_ast.LocRef;
const NamedExport = js_ast.NamedExport;
@@ -138,6 +139,3 @@ const RefHashCtx = js_ast.RefHashCtx;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
const G = js_ast.G;
pub const Class = G.Class;

View File

@@ -87,7 +87,7 @@ pub const B = union(Binding.Tag) {
}
};
// @sortImports
pub const Class = G.Class;
const std = @import("std");
@@ -100,7 +100,5 @@ const Binding = js_ast.Binding;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const Flags = js_ast.Flags;
const Ref = js_ast.Ref;
const G = js_ast.G;
pub const Class = G.Class;
const Ref = js_ast.Ref;

View File

@@ -147,8 +147,6 @@ pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding
}
}
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -201,7 +201,11 @@ pub fn addUrlForCss(
}
}
// @sortImports
pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
pub const ConstValuesMap = Ast.ConstValuesMap;
pub const NamedExports = Ast.NamedExports;
pub const NamedImports = Ast.NamedImports;
pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;
const std = @import("std");
@@ -213,6 +217,7 @@ const strings = bun.strings;
const MimeType = bun.http.MimeType;
const js_ast = bun.js_ast;
const Ast = js_ast.Ast;
const BundledAst = js_ast.BundledAst;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
@@ -222,10 +227,3 @@ const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
const TlaCheck = js_ast.TlaCheck;
const Ast = js_ast.Ast;
pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
pub const ConstValuesMap = Ast.ConstValuesMap;
pub const NamedExports = Ast.NamedExports;
pub const NamedImports = Ast.NamedImports;
pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;

View File

@@ -124,7 +124,7 @@ pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier
return minifier;
}
// @sortImports
pub const Class = G.Class;
const std = @import("std");
@@ -133,7 +133,5 @@ const string = bun.string;
const js_ast = bun.js_ast;
const CharFreq = js_ast.CharFreq;
const NameMinifier = js_ast.NameMinifier;
const G = js_ast.G;
pub const Class = G.Class;
const NameMinifier = js_ast.NameMinifier;

View File

@@ -1416,7 +1416,7 @@ pub const Import = struct {
}
};
// @sortImports
pub const Class = G.Class;
const std = @import("std");
@@ -1438,10 +1438,8 @@ const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;
const G = js_ast.G;
const Op = js_ast.Op;
const OptionalChain = js_ast.OptionalChain;
const Ref = js_ast.Ref;
const ToJSError = js_ast.ToJSError;
const G = js_ast.G;
pub const Class = G.Class;

View File

@@ -3201,18 +3201,14 @@ pub fn StoredData(tag: Tag) type {
};
}
extern fn JSC__jsToNumber(latin1_ptr: [*]const u8, len: usize) f64;
fn stringToEquivalentNumberValue(str: []const u8) f64 {
// +"" -> 0
if (str.len == 0) return 0;
if (!bun.strings.isAllASCII(str))
return std.math.nan(f64);
return JSC__jsToNumber(str.ptr, str.len);
return bun.cpp.JSC__jsToNumber(str.ptr, str.len);
}
// @sortImports
const JSPrinter = @import("../js_printer.zig");
const std = @import("std");

View File

@@ -208,8 +208,6 @@ pub const Arg = struct {
}
};
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -631,8 +631,6 @@ pub const Runner = struct {
extern "c" fn Bun__startMacro(function: *const anyopaque, *anyopaque) void;
};
// @sortImports
const DotEnv = @import("../env_loader.zig");
const std = @import("std");

View File

@@ -164,8 +164,6 @@ pub fn NewStore(comptime types: []const type, comptime count: usize) type {
};
}
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -281,8 +281,6 @@ pub const Table = brk: {
break :brk table;
};
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -212,8 +212,6 @@ pub const Continue = struct {
label: ?LocRef = null,
};
// @sortImports
const bun = @import("bun");
const logger = bun.logger;
const string = bun.string;

View File

@@ -205,8 +205,6 @@ pub inline fn kindStopsHoisting(s: *const Scope) bool {
return @intFromEnum(s.kind) >= @intFromEnum(Kind.entry);
}
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -112,8 +112,6 @@ pub const List = struct {
};
};
// @sortImports
const bun = @import("bun");
const std = @import("std");

View File

@@ -406,8 +406,6 @@ pub fn caresAboutScope(self: *Stmt) bool {
};
}
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -468,7 +468,10 @@ pub inline fn isHoisted(self: *const Symbol) bool {
return Symbol.isKindHoisted(self.kind);
}
// @sortImports
pub const isKindFunction = Symbol.Kind.isFunction;
pub const isKindHoisted = Symbol.Kind.isHoisted;
pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction;
pub const isKindPrivate = Symbol.Kind.isPrivate;
const std = @import("std");
@@ -482,8 +485,3 @@ const G = js_ast.G;
const ImportItemStatus = js_ast.ImportItemStatus;
const Ref = js_ast.Ref;
const Symbol = js_ast.Symbol;
pub const isKindFunction = Symbol.Kind.isFunction;
pub const isKindHoisted = Symbol.Kind.isHoisted;
pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction;
pub const isKindPrivate = Symbol.Kind.isPrivate;

View File

@@ -126,7 +126,7 @@ pub const TSNamespaceMember = struct {
};
};
// @sortImports
pub const Class = G.Class;
const std = @import("std");
@@ -135,7 +135,5 @@ const logger = bun.logger;
const js_ast = bun.js_ast;
const E = js_ast.E;
const Ref = js_ast.Ref;
const G = js_ast.G;
pub const Class = G.Class;
const Ref = js_ast.Ref;

View File

@@ -55,8 +55,6 @@ pub const UseDirective = enum(u2) {
}
};
// @sortImports
const std = @import("std");
const bun = @import("bun");

View File

@@ -1,8 +1,3 @@
const std = @import("std");
const bun = @import("bun");
const js_ast = bun.JSAst;
pub const NodeIndex = u32;
pub const NodeIndexNone = 4294967293;
@@ -237,3 +232,8 @@ pub const Ref = packed struct(u64) {
return &resolved_symbol_table[ref.innerIndex()];
}
};
const std = @import("std");
const bun = @import("bun");
const js_ast = bun.JSAst;

View File

@@ -1,10 +1,3 @@
const bun = @import("bun");
const Output = bun.Output;
const JSC = bun.JSC;
const uws = bun.uws;
const Environment = bun.Environment;
const std = @import("std");
pub const Loop = uws.Loop;
/// Track if an object whose file descriptor is being watched should keep the event loop alive.
@@ -1229,3 +1222,11 @@ pub const Closer = struct {
closer.fd.close();
}
};
const std = @import("std");
const bun = @import("bun");
const Environment = bun.Environment;
const JSC = bun.JSC;
const Output = bun.Output;
const uws = bun.uws;

View File

@@ -1,10 +1,3 @@
const bun = @import("bun");
const Output = bun.Output;
const JSC = bun.JSC;
const uws = bun.uws;
const Environment = bun.Environment;
const std = @import("std");
const uv = bun.windows.libuv;
pub const Loop = uv.Loop;
pub const KeepAlive = struct {
@@ -119,8 +112,6 @@ pub const KeepAlive = struct {
}
};
const Posix = @import("./posix_event_loop.zig");
pub const FilePoll = struct {
fd: bun.FileDescriptor,
owner: Owner = undefined,
@@ -419,3 +410,13 @@ pub const Closer = struct {
bun.destroy(closer);
}
};
const Posix = @import("./posix_event_loop.zig");
const std = @import("std");
const bun = @import("bun");
const Environment = bun.Environment;
const JSC = bun.JSC;
const Output = bun.Output;
const uws = bun.uws;
const uv = bun.windows.libuv;

View File

@@ -1,8 +1,3 @@
const std = @import("std");
const Environment = @import("./env.zig");
const strings = @import("./string_immutable.zig");
const bun = @import("bun");
/// This is like ArrayList except it stores the length and capacity as u32
/// In practice, it is very unusual to have lengths above 4 GB
pub fn BabyList(comptime Type: type) type {
@@ -474,3 +469,8 @@ pub fn OffsetList(comptime Type: type) type {
}
};
}
const Environment = @import("./env.zig");
const bun = @import("bun");
const std = @import("std");
const strings = @import("./string_immutable.zig");

View File

@@ -9,6 +9,7 @@
#include "JavaScriptCore/JSSourceCode.h"
extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b);
extern "C" BunString BakeToWindowsPath(BunString a);
namespace Bake {
using namespace JSC;
@@ -107,7 +108,18 @@ static JSC::JSInternalPromise* resolvedInternalPromise(JSC::JSGlobalObject* glob
return promise;
}
extern "C" BunString BakeProdLoad(ProductionPerThread* perThreadData, BunString a);
extern "C" BunString BakeProdLoad(void* perThreadData, BunString a);
// Exported to Zig: reports whether `global` is a Bake::GlobalObject instance
// (checked via JSC's ClassInfo inheritance chain) rather than a plain global.
extern "C" bool BakeGlobalObject__isBakeGlobalObject(JSC::JSGlobalObject* global)
{
    const JSC::ClassInfo* bakeClassInfo = Bake::GlobalObject::info();
    return global->JSCell::inherits(bakeClassInfo);
}
// Exported to Zig: returns the opaque per-thread data pointer previously
// stored on this Bake global (see BakeGlobalObject__attachPerThreadData).
// The caller must pass an actual Bake::GlobalObject; jsCast assumes the cast
// is valid.
extern "C" void* BakeGlobalObject__getPerThreadData(JSC::JSGlobalObject* global)
{
    auto* bakeGlobal = jsCast<Bake::GlobalObject*>(global);
    return bakeGlobal->m_perThreadData;
}
JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
JSC::JSModuleLoader* loader, JSC::JSValue key,
@@ -134,12 +146,33 @@ JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
JSC::SourceProviderSourceType::Module));
return resolvedInternalPromise(globalObject, JSC::JSSourceCode::create(vm, WTFMove(sourceCode)));
}
return rejectedInternalPromise(globalObject, createTypeError(globalObject, makeString("Bundle does not have \""_s, moduleKey, "\". This is a bug in Bun's bundler."_s)));
// We unconditionally prefix the key with "bake:" inside
// BakeProdResolve in production.zig.
//
// But if someone does: `await import(resolve(import.meta.dir, "nav.ts"))`
// we don't actually want to load it from the Bake production module
// map and instead make it go through the normal codepath.
auto bakePrefixRemoved = moduleKey.substringSharingImpl("bake:"_s.length());
#ifdef _WIN32
// We normalize paths to contain forward slashes in bake so we don't
// have to worry about platform paths. Now we have to worry about
// it, because `moduleLoaderFetch(...)` may read the path from disk
// and so we need to give a Windows path to it.
auto temp = BakeToWindowsPath(Bun::toString(bakePrefixRemoved));
bakePrefixRemoved = temp.toWTFString();
#endif
JSString* bakePrefixRemovedString = jsNontrivialString(vm, bakePrefixRemoved);
JSValue bakePrefixRemovedJsvalue = bakePrefixRemovedString;
return Zig::GlobalObject::moduleLoaderFetch(globalObject, loader, bakePrefixRemovedJsvalue, parameters, script);
}
return rejectedInternalPromise(globalObject, createTypeError(globalObject, "BakeGlobalObject does not have per-thread data configured"_s));
}
return Zig::GlobalObject::moduleLoaderFetch(globalObject, loader, key, parameters, script);
auto result = Zig::GlobalObject::moduleLoaderFetch(globalObject, loader, key, parameters, script);
RETURN_IF_EXCEPTION(scope, rejectedInternalPromise(globalObject, scope.exception()->value()));
return result;
}
GlobalObject* GlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
@@ -205,7 +238,16 @@ const JSC::GlobalObjectMethodTable& GlobalObject::globalObjectMethodTable()
// TODO: remove this entire method
extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
{
JSC::VM& vm = JSC::VM::create(JSC::HeapType::Large).leakRef();
RefPtr<JSC::VM> vmPtr = JSC::VM::tryCreate(JSC::HeapType::Large);
if (!vmPtr) [[unlikely]] {
BUN_PANIC("Failed to allocate JavaScriptCore Virtual Machine. Did your computer run out of memory? Or maybe you compiled Bun with a mismatching libc++ version or compiler?");
}
// We need to unsafely ref this so it stays alive, later in
// `Zig__GlobalObject__destructOnExit` will call
// `vm.derefSuppressingSaferCPPChecking()` to free it.
vmPtr->refSuppressingSaferCPPChecking();
JSC::VM& vm = *vmPtr;
vm.heap.acquireAccess();
JSC::JSLockHolder locker(vm);
BunVirtualMachine* bunVM = Bun__getVM();
@@ -225,6 +267,11 @@ extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
global->setStackTraceLimit(10); // Node.js defaults to 10
global->isThreadLocalDefaultGlobalObject = true;
// if (shouldDisableStopIfNecessaryTimer) {
vm.heap.disableStopIfNecessaryTimer();
// }
// if you process.nextTick on a microtask we need this
// TODO: it segfaults! process.nextTick is scoped out for now i guess!
// vm.setOnComputeErrorInfo(computeErrorInfoWrapper);
// vm.setOnEachMicrotaskTick([global](JSC::VM &vm) -> void {
@@ -240,7 +287,7 @@ extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
return global;
}
extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, ProductionPerThread* perThreadData)
// Exported to Zig: stores the opaque per-thread data pointer on the Bake
// global. The module-loader fetch path reads it back (via
// BakeGlobalObject__getPerThreadData / m_perThreadData) to resolve "bake:"
// modules through BakeProdLoad.
// NOTE(review): `perThreadData` is opaque (void*) here; its ownership and
// lifetime are managed on the Zig side — confirm it outlives this global.
extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, void* perThreadData)
{
global->m_perThreadData = perThreadData;
}

View File

@@ -4,13 +4,11 @@
namespace Bake {
struct ProductionPerThread;
class GlobalObject : public Zig::GlobalObject {
public:
using Base = Zig::GlobalObject;
ProductionPerThread* m_perThreadData = nullptr;
void* m_perThreadData = nullptr;
DECLARE_INFO;
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
@@ -39,4 +37,7 @@ public:
}
};
extern "C" void* BakeGlobalObject__getPerThreadData(JSC::JSGlobalObject* global);
extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, void* perThreadData);
}; // namespace Bake

View File

@@ -22,7 +22,7 @@ extern "C" BunString BakeSourceProvider__getSourceSlice(SourceProvider* provider
return Bun::toStringView(provider->source());
}
extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(GlobalObject* global, BunString source, bool separateSSRGraph) {
extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(JSC::JSGlobalObject* global, BunString source, bool separateSSRGraph) {
auto& vm = JSC::getVM(global);
auto scope = DECLARE_THROW_SCOPE(vm);

View File

@@ -7,7 +7,9 @@
//!
//! All work is held in-memory, using manually managed data-oriented design.
//! For questions about DevServer, please consult the delusional @paperclover
const DevServer = @This();
pub const debug = bun.Output.Scoped(.DevServer, false);
pub const igLog = bun.Output.scoped(.IncrementalGraph, false);
pub const mapLog = bun.Output.scoped(.SourceMapStore, false);
@@ -8521,58 +8523,53 @@ pub fn getDeinitCountForTesting() usize {
return dev_server_deinit_count_for_testing;
}
const std = @import("std");
const Allocator = std.mem.Allocator;
const Mutex = bun.Mutex;
const ArrayListUnmanaged = std.ArrayListUnmanaged;
const AutoArrayHashMapUnmanaged = std.AutoArrayHashMapUnmanaged;
const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena;
const bun = @import("bun");
const AllocationScope = bun.AllocationScope;
const Environment = bun.Environment;
const Mutex = bun.Mutex;
const Output = bun.Output;
const StringJoiner = bun.StringJoiner;
const Watcher = bun.Watcher;
const assert = bun.assert;
const assert_eql = bun.assert_eql;
const DynamicBitSetUnmanaged = bun.bit_set.DynamicBitSetUnmanaged;
const bake = bun.bake;
const FrameworkRouter = bake.FrameworkRouter;
const Route = FrameworkRouter.Route;
const OpaqueFileId = FrameworkRouter.OpaqueFileId;
const DynamicBitSetUnmanaged = bun.bit_set.DynamicBitSetUnmanaged;
const Log = bun.logger.Log;
const Output = bun.Output;
const Transpiler = bun.transpiler.Transpiler;
const BundleV2 = bun.bundle_v2.BundleV2;
const Chunk = bun.bundle_v2.Chunk;
const ContentHasher = bun.bundle_v2.ContentHasher;
const uws = bun.uws;
const AnyWebSocket = uws.AnyWebSocket;
const Request = uws.Request;
const AnyResponse = bun.uws.AnyResponse;
const MimeType = bun.http.MimeType;
const RefPtr = bun.ptr.RefPtr;
const StaticRoute = bun.server.StaticRoute;
const Transpiler = bun.transpiler.Transpiler;
const EventLoopTimer = bun.api.Timer.EventLoopTimer;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const VirtualMachine = JSC.VirtualMachine;
const HTMLBundle = JSC.API.HTMLBundle;
const Plugin = JSC.API.JSBundler.Plugin;
const EventLoopTimer = bun.api.Timer.EventLoopTimer;
const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena;
const Watcher = bun.Watcher;
const StaticRoute = bun.server.StaticRoute;
const AnyBlob = JSC.WebCore.Blob.Any;
const Plugin = JSC.API.JSBundler.Plugin;
const BunFrontendDevServerAgent = JSC.Debugger.BunFrontendDevServerAgent;
const DebuggerId = JSC.Debugger.DebuggerId;
const FrameworkRouter = bake.FrameworkRouter;
const OpaqueFileId = FrameworkRouter.OpaqueFileId;
const Route = FrameworkRouter.Route;
const BundleV2 = bun.bundle_v2.BundleV2;
const Chunk = bun.bundle_v2.Chunk;
const ContentHasher = bun.bundle_v2.ContentHasher;
const SourceMap = bun.sourcemap;
const VLQ = SourceMap.VLQ;
const StringJoiner = bun.StringJoiner;
const AllocationScope = bun.AllocationScope;
const BunFrontendDevServerAgent = JSC.Debugger.BunFrontendDevServerAgent;
const DebuggerId = JSC.Debugger.DebuggerId;
const uws = bun.uws;
const AnyResponse = bun.uws.AnyResponse;
const AnyWebSocket = uws.AnyWebSocket;
const Request = uws.Request;
const RefPtr = bun.ptr.RefPtr;
const std = @import("std");
const ArrayListUnmanaged = std.ArrayListUnmanaged;
const AutoArrayHashMapUnmanaged = std.AutoArrayHashMapUnmanaged;
const Allocator = std.mem.Allocator;

View File

@@ -1,6 +1,7 @@
//! Discovers routes from the filesystem, as instructed by the framework
//! configuration. Agnostic to all different paradigms. Supports incrementally
//! updating for DevServer, or serializing to a binary for use in production.
const FrameworkRouter = @This();
/// Metadata for route files is specified out of line, either in DevServer where
@@ -287,6 +288,11 @@ pub const EncodedPattern = struct {
},
.param => |name| {
const end = strings.indexOfCharPos(path, '/', i) orelse path.len;
// Check if we're about to exceed the maximum number of parameters
if (param_num >= MatchedParams.max_count) {
// TODO: ideally we should throw a nice user message
bun.Output.panic("Route pattern matched more than {d} parameters. Path: {s}", .{ MatchedParams.max_count, path });
}
params.params.len = @intCast(param_num + 1);
params.params.buffer[param_num] = .{
.key = name,
@@ -295,8 +301,30 @@ pub const EncodedPattern = struct {
param_num += 1;
i = if (end == path.len) end else end + 1;
},
.catch_all_optional => return true,
.catch_all => break,
.catch_all_optional, .catch_all => |name| {
// Capture remaining path segments as individual parameters
if (i < path.len) {
var segment_start = i;
while (segment_start < path.len) {
const segment_end = strings.indexOfCharPos(path, '/', segment_start) orelse path.len;
if (segment_start < segment_end) {
// Check if we're about to exceed the maximum number of parameters
if (param_num >= MatchedParams.max_count) {
// TODO: ideally we should throw a nice user message
bun.Output.panic("Route pattern matched more than {d} parameters. Path: {s}", .{ MatchedParams.max_count, path });
}
params.params.len = @intCast(param_num + 1);
params.params.buffer[param_num] = .{
.key = name,
.value = path[segment_start..segment_end],
};
param_num += 1;
}
segment_start = if (segment_end == path.len) segment_end else segment_end + 1;
}
}
return true;
},
.group => continue,
}
}
@@ -1336,15 +1364,17 @@ pub const JSFrameworkRouter = struct {
};
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;
const bun = @import("bun");
const strings = bun.strings;
const Resolver = bun.resolver.Resolver;
const DirInfo = bun.resolver.DirInfo;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const JSGlobalObject = JSC.JSGlobalObject;
const CallFrame = JSC.CallFrame;
const JSGlobalObject = JSC.JSGlobalObject;
const JSValue = JSC.JSValue;
const DirInfo = bun.resolver.DirInfo;
const Resolver = bun.resolver.Resolver;
const mem = std.mem;
const Allocator = mem.Allocator;

8
src/bake/bake.d.ts vendored
View File

@@ -418,9 +418,9 @@ declare module "bun" {
}
type GetParamIterator =
| AsyncIterable<Record<string, string>, GetParamsFinalOpts>
| Iterable<Record<string, string>, GetParamsFinalOpts>
| ({ pages: Array<Record<string, string>> } & GetParamsFinalOpts);
| AsyncIterable<Record<string, string | string[]>, GetParamsFinalOpts>
| Iterable<Record<string, string | string[]>, GetParamsFinalOpts>
| ({ pages: Array<Record<string, string | string[]>> } & GetParamsFinalOpts);
type GetParamsFinalOpts = void | null | {
/**
@@ -478,7 +478,7 @@ declare module "bun" {
*/
readonly layouts: ReadonlyArray<any>;
/** Received route params. `null` if the route does not take params */
readonly params: null | Record<string, string>;
readonly params: null | Record<string, string | string[]>;
/**
* A list of js files that the route will need to be interactive.
*/

View File

@@ -38,6 +38,18 @@ pub const UserOptions = struct {
errdefer allocations.free();
var bundler_options = SplitBundlerOptions.empty;
if (try config.getOptional(global, "bundlerOptions", JSValue)) |js_options| {
if (try js_options.getOptional(global, "server", JSValue)) |server_options| {
bundler_options.server = try BuildConfigSubset.fromJS(global, server_options);
}
if (try js_options.getOptional(global, "client", JSValue)) |client_options| {
bundler_options.client = try BuildConfigSubset.fromJS(global, client_options);
}
if (try js_options.getOptional(global, "ssr", JSValue)) |ssr_options| {
bundler_options.ssr = try BuildConfigSubset.fromJS(global, ssr_options);
}
}
const framework = try Framework.fromJS(
try config.get(global, "framework") orelse {
return global.throwInvalidArguments("'" ++ api_name ++ "' is missing 'framework'", .{});
@@ -155,6 +167,45 @@ const BuildConfigSubset = struct {
env: bun.Schema.Api.DotEnvBehavior = ._none,
env_prefix: ?[]const u8 = null,
define: bun.Schema.Api.StringMap = .{ .keys = &.{}, .values = &.{} },
source_map: bun.Schema.Api.SourceMapMode = .external,
minify_syntax: ?bool = null,
minify_identifiers: ?bool = null,
minify_whitespace: ?bool = null,
/// Parses one per-graph bundler options object from JS (e.g. the value of
/// `bundlerOptions.server` / `.client` / `.ssr`) into a `BuildConfigSubset`.
///
/// Recognized keys:
/// - `sourcemap`: parsed by `SourceMapMode.fromJS`; any unrecognized value
///   throws ERR_INVALID_ARG_TYPE ("inline" | "external" | "linked").
/// - `minify`: either a boolean, or an object with `whitespace` / `syntax` /
///   `identifiers` boolean fields, each set independently when present.
///
/// NOTE(review): `minify: false` appears to be silently ignored — the boolean
/// fast path only fires when the value is `true`, and a plain `false` then
/// falls through to the per-field lookups. Confirm whether an explicit
/// `false` should disable all three minify flags.
pub fn fromJS(global: *JSC.JSGlobalObject, js_options: JSValue) bun.JSError!BuildConfigSubset {
var options = BuildConfigSubset{};
if (try js_options.getOptional(global, "sourcemap", JSValue)) |val| brk: {
if (try bun.Schema.Api.SourceMapMode.fromJS(global, val)) |sourcemap| {
options.source_map = sourcemap;
break :brk;
}
return bun.JSC.Node.validators.throwErrInvalidArgType(global, "sourcemap", .{}, "\"inline\" | \"external\" | \"linked\"", val);
}
if (try js_options.getOptional(global, "minify", JSValue)) |minify_options| brk: {
// A literal `true` enables every minify option at once.
if (minify_options.isBoolean() and minify_options.asBoolean()) {
options.minify_syntax = minify_options.asBoolean();
options.minify_identifiers = minify_options.asBoolean();
options.minify_whitespace = minify_options.asBoolean();
break :brk;
}
// Otherwise treat it as an options object; absent fields stay null so
// callers can fall back to their mode-based defaults.
if (try minify_options.getBooleanLoose(global, "whitespace")) |value| {
options.minify_whitespace = value;
}
if (try minify_options.getBooleanLoose(global, "syntax")) |value| {
options.minify_syntax = value;
}
if (try minify_options.getBooleanLoose(global, "identifiers")) |value| {
options.minify_identifiers = value;
}
}
return options;
}
};
/// A "Framework" in our eyes is simply set of bundler options that a framework
@@ -577,11 +628,6 @@ pub const Framework = struct {
try bundler_options.parsePluginArray(plugin_array, global);
}
if (try opts.getOptional(global, "bundlerOptions", JSValue)) |js_options| {
_ = js_options; // TODO:
// try bundler_options.parseInto(global, js_options, .root);
}
return framework;
}
@@ -603,7 +649,7 @@ pub const Framework = struct {
else => .none,
};
return initTranspilerWithSourceMap(
return initTranspilerWithOptions(
framework,
arena,
log,
@@ -612,10 +658,13 @@ pub const Framework = struct {
out,
bundler_options,
source_map,
null,
null,
null,
);
}
pub fn initTranspilerWithSourceMap(
pub fn initTranspilerWithOptions(
framework: *Framework,
arena: std.mem.Allocator,
log: *bun.logger.Log,
@@ -624,6 +673,9 @@ pub const Framework = struct {
out: *bun.transpiler.Transpiler,
bundler_options: *const BuildConfigSubset,
source_map: bun.options.SourceMapOption,
minify_whitespace: ?bool,
minify_syntax: ?bool,
minify_identifiers: ?bool,
) !void {
const JSAst = bun.JSAst;
@@ -688,9 +740,9 @@ pub const Framework = struct {
out.options.production = mode != .development;
out.options.tree_shaking = mode != .development;
out.options.minify_syntax = mode != .development;
out.options.minify_identifiers = mode != .development;
out.options.minify_whitespace = mode != .development;
out.options.minify_syntax = minify_syntax orelse (mode != .development);
out.options.minify_identifiers = minify_identifiers orelse (mode != .development);
out.options.minify_whitespace = minify_whitespace orelse (mode != .development);
out.options.css_chunking = true;
out.options.framework = framework;
out.options.inline_entrypoint_import_meta_main = true;

View File

@@ -38,9 +38,7 @@ pub fn buildCommand(ctx: bun.CLI.Command.Context) !void {
defer vm.deinit();
// A special global object is used to allow registering virtual modules
// that bypass Bun's normal module resolver and plugin system.
vm.global = BakeCreateProdGlobal(vm.console);
vm.regular_event_loop.global = vm.global;
vm.jsc = vm.global.vm();
vm.event_loop.ensureWaker();
const b = &vm.transpiler;
vm.preload = ctx.preloads;
@@ -82,7 +80,21 @@ pub fn buildCommand(ctx: bun.CLI.Command.Context) !void {
const api_lock = vm.jsc.getAPILock();
defer api_lock.release();
buildWithVm(ctx, cwd, vm) catch |err| switch (err) {
var pt: PerThread = .{
.input_files = &.{},
.bundled_outputs = &.{},
.output_indexes = &.{},
.module_keys = &.{},
.module_map = .{},
.source_maps = .{},
.vm = vm,
.loaded_files = bun.bit_set.AutoBitSet.initEmpty(vm.allocator, 0) catch unreachable,
.all_server_files = JSValue.null,
};
buildWithVm(ctx, cwd, vm, &pt) catch |err| switch (err) {
error.JSError => |e| {
bun.handleErrorReturnTrace(err, @errorReturnTrace());
const err_value = vm.global.takeException(e);
@@ -96,7 +108,34 @@ pub fn buildCommand(ctx: bun.CLI.Command.Context) !void {
};
}
pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine) !void {
/// Registers the source map belonging to `file` in the `source_maps` lookup
/// table, keyed as "bake:/<dest path without leading ./>" so it matches the
/// module keys used elsewhere in this file (see `output_module_map`).
///
/// NOTE(review): despite the name, this does not write anything to disk — it
/// only records the map entry (the in-body TODO suggests disk writes were
/// considered). Confirm whether the function should be renamed.
///
/// `allocator` owns the formatted key string inserted into `source_maps`.
/// Asserts that `file.source_map_index` is set and that the referenced output
/// really is a `.sourcemap` file.
pub fn writeSourcemapToDisk(
allocator: std.mem.Allocator,
file: *const OutputFile,
bundled_outputs: []const OutputFile,
source_maps: *bun.StringArrayHashMapUnmanaged(OutputFile.Index),
) !void {
// don't call this if the file does not have sourcemaps!
bun.assert(file.source_map_index != std.math.maxInt(u32));
// TODO: should we just write the sourcemaps to disk?
const source_map_index = file.source_map_index;
const source_map_file: *const OutputFile = &bundled_outputs[source_map_index];
bun.assert(source_map_file.output_kind == .sourcemap);
// Strip a leading "./" (or ".\" on Windows) so the "bake:/" key is stable
// regardless of how the dest path was spelled.
const without_prefix = if (bun.strings.hasPrefixComptime(file.dest_path, "./") or
(Environment.isWindows and bun.strings.hasPrefixComptime(file.dest_path, ".\\")))
file.dest_path[2..]
else
file.dest_path;
try source_maps.put(
allocator,
try std.fmt.allocPrint(allocator, "bake:/{s}", .{without_prefix}),
OutputFile.Index.init(@intCast(source_map_index)),
);
}
pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
// Load and evaluate the configuration module
const global = vm.global;
const b = &vm.transpiler;
@@ -174,10 +213,10 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
var client_transpiler: bun.transpiler.Transpiler = undefined;
var server_transpiler: bun.transpiler.Transpiler = undefined;
var ssr_transpiler: bun.transpiler.Transpiler = undefined;
try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .server, &server_transpiler, &options.bundler_options.server, .@"inline");
try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .client, &client_transpiler, &options.bundler_options.client, .@"inline");
try framework.initTranspilerWithOptions(allocator, vm.log, .production_static, .server, &server_transpiler, &options.bundler_options.server, bun.options.SourceMapOption.fromApi(options.bundler_options.server.source_map), options.bundler_options.server.minify_whitespace, options.bundler_options.server.minify_syntax, options.bundler_options.server.minify_identifiers);
try framework.initTranspilerWithOptions(allocator, vm.log, .production_static, .client, &client_transpiler, &options.bundler_options.client, bun.options.SourceMapOption.fromApi(options.bundler_options.client.source_map), options.bundler_options.client.minify_whitespace, options.bundler_options.client.minify_syntax, options.bundler_options.client.minify_identifiers);
if (separate_ssr_graph) {
try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .ssr, &ssr_transpiler, &options.bundler_options.ssr, .@"inline");
try framework.initTranspilerWithOptions(allocator, vm.log, .production_static, .ssr, &ssr_transpiler, &options.bundler_options.ssr, bun.options.SourceMapOption.fromApi(options.bundler_options.ssr.source_map), options.bundler_options.ssr.minify_whitespace, options.bundler_options.ssr.minify_syntax, options.bundler_options.ssr.minify_identifiers);
}
if (ctx.bundler_options.bake_debug_disable_minify) {
@@ -261,6 +300,11 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
.{ .js = vm.event_loop },
);
const bundled_outputs = bundled_outputs_list.items;
if (bundled_outputs.len == 0) {
Output.prettyln("done", .{});
Output.flush();
return;
}
Output.prettyErrorln("Rendering routes", .{});
Output.flush();
@@ -268,6 +312,8 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
var root_dir = try std.fs.cwd().makeOpenPath("dist", .{});
defer root_dir.close();
var maybe_runtime_file_index: ?u32 = null;
var css_chunks_count: usize = 0;
var css_chunks_first: usize = 0;
@@ -278,16 +324,22 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
const module_keys = try vm.allocator.alloc(bun.String, entry_points.files.count());
const output_indexes = entry_points.files.values();
var output_module_map: bun.StringArrayHashMapUnmanaged(OutputFile.Index) = .{};
var source_maps: bun.StringArrayHashMapUnmanaged(OutputFile.Index) = .{};
@memset(module_keys, bun.String.dead);
for (bundled_outputs, 0..) |file, i| {
log("{s} - {s} : {s} - {?d}\n", .{
log("src_index={any} side={s} src={s} dest={s} - {?d}\n", .{
file.source_index.unwrap(),
if (file.side) |s| @tagName(s) else "null",
file.src_path.text,
file.dest_path,
file.entry_point_index,
});
if (file.loader.isCSS()) {
if (css_chunks_count == 0) css_chunks_first = i;
if (css_chunks_count == 0) {
css_chunks_first = i;
} else {
css_chunks_first = @min(css_chunks_first, i);
}
css_chunks_count += 1;
}
@@ -297,6 +349,17 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
}
}
// The output file which contains the runtime (Index.runtime, contains
// wrapper functions like `__esm`) is marked as server side, but it is
// also used by client
if (file.bake_extra.bake_is_runtime) {
if (comptime bun.Environment.allow_assert) {
bun.assertf(maybe_runtime_file_index == null, "Runtime file should only be in one chunk.", .{});
}
maybe_runtime_file_index = @intCast(i);
}
// TODO: Maybe not do all the disk-writing in 1 thread?
switch (file.side orelse continue) {
.client => {
// Client-side resources will be written to disk for usage on the client side
@@ -306,7 +369,6 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
};
},
.server => {
// For Debugging
if (ctx.bundler_options.bake_debug_dump_server) {
_ = file.writeToDisk(root_dir, ".") catch |err| {
bun.handleErrorReturnTrace(err, @errorReturnTrace());
@@ -314,6 +376,13 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
};
}
// If the file has a sourcemap, store it so we can put it on
// `PerThread` so we can provide sourcemapped stacktraces for
// server components.
if (file.source_map_index != std.math.maxInt(u32)) {
try writeSourcemapToDisk(allocator, &file, bundled_outputs, &source_maps);
}
switch (file.output_kind) {
.@"entry-point", .chunk => {
const without_prefix = if (bun.strings.hasPrefixComptime(file.dest_path, "./") or
@@ -330,6 +399,8 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
}
}
log(" adding module map entry: output_module_map(bake:/{s}) = {d}\n", .{ without_prefix, i });
try output_module_map.put(
allocator,
try std.fmt.allocPrint(allocator, "bake:/{s}", .{without_prefix}),
@@ -342,6 +413,34 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
}
},
}
// TODO: should we just write the sourcemaps to disk?
if (file.source_map_index != std.math.maxInt(u32)) {
try writeSourcemapToDisk(allocator, &file, bundled_outputs, &source_maps);
}
}
// Write the runtime file to disk if there are any client chunks
{
const runtime_file_index = maybe_runtime_file_index orelse {
bun.Output.panic("Runtime file not found. This is an unexpected bug in Bun. Please file a bug report on GitHub.", .{});
};
const any_client_chunks = any_client_chunks: {
for (bundled_outputs) |file| {
if (file.side) |s| {
if (s == .client and !bun.strings.eqlComptime(file.src_path.text, "bun-framework-react/client.tsx")) {
break :any_client_chunks true;
}
}
}
break :any_client_chunks false;
};
if (any_client_chunks) {
const runtime_file: *const OutputFile = &bundled_outputs[runtime_file_index];
_ = runtime_file.writeToDisk(root_dir, ".") catch |err| {
bun.handleErrorReturnTrace(err, @errorReturnTrace());
Output.err(err, "Failed to write {} to output directory", .{bun.fmt.quote(runtime_file.dest_path)});
};
}
}
const per_thread_options: PerThread.Options = .{
@@ -350,9 +449,10 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
.output_indexes = output_indexes,
.module_keys = module_keys,
.module_map = output_module_map,
.source_maps = source_maps,
};
var pt = try PerThread.init(vm, per_thread_options);
pt.* = try PerThread.init(vm, per_thread_options);
pt.attach();
// Static site generator
@@ -417,15 +517,33 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
const css_chunk_js_strings = try allocator.alloc(JSValue, css_chunks_count);
for (bundled_outputs[css_chunks_first..][0..css_chunks_count], css_chunk_js_strings) |output_file, *str| {
bun.assert(output_file.dest_path[0] != '.');
// CSS chunks must be in contiguous order!!
bun.assert(output_file.loader.isCSS());
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
}
// Route URL patterns with parameter placeholders.
// Examples: "/", "/about", "/blog/:slug", "/products/:category/:id"
const route_patterns = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
// File indices for each route's components (page, layouts).
// Example: [2, 5, 0] = page at index 2, layout at 5, root layout at 0
const route_nested_files = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
// Router type index (lower 8 bits) and flags (upper 24 bits).
// Example: 0x00000001 = router type 1, no flags
const route_type_and_flags = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
// Source file paths relative to project root.
// Examples: "pages/index.tsx", "pages/blog/[slug].tsx"
const route_source_files = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
// Parameter names for dynamic routes (reversed order), null for static routes.
// Examples: ["slug"] for /blog/[slug], ["id", "category"] for /products/[category]/[id]
const route_param_info = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
// CSS chunk URLs for each route.
// Example: ["/assets/main.css", "/assets/blog.css"]
const route_style_references = try JSValue.createEmptyArray(global, navigatable_routes.items.len);
var params_buf: std.ArrayListUnmanaged([]const u8) = .{};
@@ -444,15 +562,18 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
.param => {
params_buf.append(ctx.allocator, route.part.param) catch unreachable;
},
.catch_all, .catch_all_optional => {
.catch_all => {
params_buf.append(ctx.allocator, route.part.catch_all) catch unreachable;
},
.catch_all_optional => {
return global.throw("catch-all routes are not supported in static site generation", .{});
},
else => {},
}
var file_count: u32 = 1;
var css_file_count: u32 = @intCast(main_file.referenced_css_files.len);
var css_file_count: u32 = @intCast(main_file.referenced_css_chunks.len);
if (route.file_layout.unwrap()) |file| {
css_file_count += @intCast(pt.outputFile(file).referenced_css_files.len);
css_file_count += @intCast(pt.outputFile(file).referenced_css_chunks.len);
file_count += 1;
}
var next: ?FrameworkRouter.Route.Index = route.parent.unwrap();
@@ -463,13 +584,16 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
.param => {
params_buf.append(ctx.allocator, parent.part.param) catch unreachable;
},
.catch_all, .catch_all_optional => {
.catch_all => {
params_buf.append(ctx.allocator, parent.part.catch_all) catch unreachable;
},
.catch_all_optional => {
return global.throw("catch-all routes are not supported in static site generation", .{});
},
else => {},
}
if (parent.file_layout.unwrap()) |file| {
css_file_count += @intCast(pt.outputFile(file).referenced_css_files.len);
css_file_count += @intCast(pt.outputFile(file).referenced_css_chunks.len);
file_count += 1;
}
next = parent.parent.unwrap();
@@ -483,13 +607,13 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
file_count = 1;
css_file_count = 0;
try file_list.putIndex(global, 0, try pt.preloadBundledModule(main_file_route_index));
for (main_file.referenced_css_files) |ref| {
for (main_file.referenced_css_chunks) |ref| {
try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
css_file_count += 1;
}
if (route.file_layout.unwrap()) |file| {
try file_list.putIndex(global, file_count, try pt.preloadBundledModule(file));
for (pt.outputFile(file).referenced_css_files) |ref| {
for (pt.outputFile(file).referenced_css_chunks) |ref| {
try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
css_file_count += 1;
}
@@ -500,7 +624,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
const parent = router.routePtr(parent_index);
if (parent.file_layout.unwrap()) |file| {
try file_list.putIndex(global, file_count, try pt.preloadBundledModule(file));
for (pt.outputFile(file).referenced_css_files) |ref| {
for (pt.outputFile(file).referenced_css_chunks) |ref| {
try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
css_file_count += 1;
}
@@ -520,6 +644,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
try route_nested_files.putIndex(global, @intCast(nav_index), file_list);
try route_type_and_flags.putIndex(global, @intCast(nav_index), JSValue.jsNumberFromInt32(@bitCast(TypeAndFlags{
.type = route.type.get(),
.no_client = main_file.bake_extra.fully_static,
})));
if (params_buf.items.len > 0) {
@@ -570,6 +695,12 @@ fn loadModule(vm: *VirtualMachine, global: *JSC.JSGlobalObject, key: JSValue) !J
const promise = BakeLoadModuleByKey(global, key).asAnyPromise().?.internal;
promise.setHandled(vm.jsc);
vm.waitForPromise(.{ .internal = promise });
// TODO: Specially draining microtasks here because `waitForPromise` has a
// bug which forgets to do it, but I don't want to fix it right now as it
// could affect a lot of the codebase. This should be removed.
vm.eventLoop().drainMicrotasks() catch {
bun.Global.crash();
};
switch (promise.unwrap(vm.jsc, .mark_handled)) {
.pending => unreachable,
.fulfilled => |val| {
@@ -598,23 +729,33 @@ fn BakeGetOnModuleNamespace(global: *JSC.JSGlobalObject, module: JSValue, proper
return result;
}
/// Renders all routes for static site generation by calling the JavaScript implementation.
extern fn BakeRenderRoutesForProdStatic(
*JSC.JSGlobalObject,
/// Output directory path (e.g., "./dist")
out_base: bun.String,
/// Server module paths (e.g., ["bake://page.js", "bake://layout.js"])
all_server_files: JSValue,
/// Framework prerender functions by router type
render_static: JSValue,
/// Framework getParams functions by router type
get_params: JSValue,
/// Client entry URLs by router type (e.g., ["/client.js", null])
client_entry_urls: JSValue,
/// Route patterns (e.g., ["/", "/about", "/blog/:slug"])
patterns: JSValue,
/// File indices per route (e.g., [[0], [1], [2, 0]])
files: JSValue,
/// Packed router type and flags (e.g., [0x00000000, 0x00000001])
type_and_flags: JSValue,
/// Source paths (e.g., ["pages/index.tsx", "pages/blog/[slug].tsx"])
src_route_files: JSValue,
/// Dynamic route params (e.g., [null, null, ["slug"]])
param_information: JSValue,
/// CSS URLs per route (e.g., [["/main.css"], ["/main.css", "/blog.css"]])
styles: JSValue,
) *JSC.JSPromise;
extern fn BakeCreateProdGlobal(console_ptr: *anyopaque) *JSC.JSGlobalObject;
/// The result of this function is a JSValue that wont be garbage collected, as
/// it will always have at least one reference by the module loader.
fn BakeRegisterProductionChunk(global: *JSC.JSGlobalObject, key: bun.String, source_code: bun.String) bun.JSError!JSValue {
@@ -627,6 +768,21 @@ fn BakeRegisterProductionChunk(global: *JSC.JSGlobalObject, key: bun.String, sou
return result;
}
pub export fn BakeToWindowsPath(input: bun.String) callconv(.C) bun.String {
if (comptime bun.Environment.isPosix) {
@panic("This code should not be called on POSIX systems.");
}
var sfa = std.heap.stackFallback(1024, bun.default_allocator);
const alloc = sfa.get();
const input_utf8 = input.toUTF8(alloc);
defer input_utf8.deinit();
const input_slice = input_utf8.slice();
const output = bun.w_path_buffer_pool.get();
defer bun.w_path_buffer_pool.put(output);
const output_slice = bun.strings.toWPathNormalizeAutoExtend(output.*[0..], input_slice);
return bun.String.cloneUTF16(output_slice);
}
pub export fn BakeProdResolve(global: *JSC.JSGlobalObject, a_str: bun.String, specifier_str: bun.String) callconv(.C) bun.String {
var sfa = std.heap.stackFallback(@sizeOf(bun.PathBuffer) * 2, bun.default_allocator);
const alloc = sfa.get();
@@ -653,7 +809,7 @@ pub export fn BakeProdResolve(global: *JSC.JSGlobalObject, a_str: bun.String, sp
return bun.String.createFormat("bake:{s}", .{bun.path.joinAbs(
bun.Dirname.dirname(u8, referrer.slice()[5..]) orelse referrer.slice()[5..],
.auto,
.posix, // force posix paths in bake
specifier.slice(),
)}) catch return bun.String.dead;
}
@@ -743,6 +899,7 @@ pub const PerThread = struct {
module_keys: []const bun.String,
/// Unordered
module_map: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
source_maps: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
// Thread-local
vm: *JSC.VirtualMachine,
@@ -761,6 +918,7 @@ pub const PerThread = struct {
module_keys: []const bun.String,
/// Unordered
module_map: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
source_maps: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
};
extern fn BakeGlobalObject__attachPerThreadData(global: *JSC.JSGlobalObject, pt: ?*PerThread) void;
@@ -782,6 +940,7 @@ pub const PerThread = struct {
.vm = vm,
.loaded_files = loaded_files,
.all_server_files = all_server_files,
.source_maps = opts.source_maps,
};
}
@@ -807,7 +966,7 @@ pub const PerThread = struct {
}
// Must be run at the top of the event loop
pub fn loadBundledModule(pt: *PerThread, id: OpaqueFileId) bun.JSError!JSValue {
pub fn loadBundledModule(pt: *PerThread, id: OpaqueFileId) !JSValue {
return try loadModule(
pt.vm,
pt.vm.global,
@@ -842,7 +1001,20 @@ pub export fn BakeProdLoad(pt: *PerThread, key: bun.String) bun.String {
const allocator = sfa.get();
const utf8 = key.toUTF8(allocator);
defer utf8.deinit();
log("BakeProdLoad: {s}\n", .{utf8.slice()});
if (pt.module_map.get(utf8.slice())) |value| {
log(" found in module_map: {s}\n", .{utf8.slice()});
return pt.bundled_outputs[value.get()].value.toBunString();
}
return bun.String.dead;
}
pub export fn BakeProdSourceMap(pt: *PerThread, key: bun.String) bun.String {
var sfa = std.heap.stackFallback(4096, bun.default_allocator);
const allocator = sfa.get();
const utf8 = key.toUTF8(allocator);
defer utf8.deinit();
if (pt.source_maps.get(utf8.slice())) |value| {
return pt.bundled_outputs[value.get()].value.toBunString();
}
return bun.String.dead;
@@ -850,25 +1022,30 @@ pub export fn BakeProdLoad(pt: *PerThread, key: bun.String) bun.String {
const TypeAndFlags = packed struct(i32) {
type: u8,
unused: u24 = 0,
/// Don't inclue the runtime client code (e.g.
/// bun-framework-react/client.tsx). This is used if we know a server
/// component does not include any downstream usages of "use client" and so
/// we can omit the client code entirely.
no_client: bool = false,
unused: u23 = 0,
};
fn @"export"() void {
_ = BakeProdResolve;
_ = BakeProdLoad;
}
const std = @import("std");
const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;
const OutputFile = bun.options.OutputFile;
const bake = bun.bake;
const FrameworkRouter = bake.FrameworkRouter;
const OpaqueFileId = FrameworkRouter.OpaqueFileId;
const OutputFile = bun.options.OutputFile;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const VirtualMachine = JSC.VirtualMachine;
fn @"export"() void {
_ = BakeProdResolve;
_ = BakeProdLoad;
}
const FrameworkRouter = bake.FrameworkRouter;
const OpaqueFileId = FrameworkRouter.OpaqueFileId;

View File

@@ -1,6 +1,3 @@
const std = @import("std");
const bun = @import("bun");
const mixed_decoder = brk: {
var decoder = zig_base64.standard.decoderWithIgnore("\xff \t\r\n" ++ [_]u8{
std.ascii.control_code.vt,
@@ -556,3 +553,6 @@ const zig_base64 = struct {
} else |err| if (err != error.NoSpaceLeft) return err;
}
};
const bun = @import("bun");
const std = @import("std");

View File

@@ -35,11 +35,6 @@
//! A variant of DynamicBitSet which does not store a pointer to its
//! allocator, in order to save space.
const std = @import("std");
const Allocator = std.mem.Allocator;
const bun = @import("bun");
const Environment = bun.Environment;
/// Returns the optimal static bit set type for the specified number
/// of elements. The returned type will perform no allocations,
/// can be copied by value, and does not require deinitialization.
@@ -1578,8 +1573,6 @@ pub const Range = struct {
// ---------------- Tests -----------------
const testing = std.testing;
fn testEql(empty: anytype, full: anytype, len: usize) !void {
try testing.expect(empty.eql(empty));
try testing.expect(full.eql(full));
@@ -1934,3 +1927,10 @@ fn testStaticBitSet(comptime Set: type, comptime Container: @Type(.enum_literal)
try testPureBitSet(Set);
}
const bun = @import("bun");
const Environment = bun.Environment;
const std = @import("std");
const testing = std.testing;
const Allocator = std.mem.Allocator;

View File

@@ -1,16 +1,8 @@
// TODO: move all custom functions from the translated file into this file, then
// the translated file can be provided by `zig translate-c`
const boring = @import("./deps/boringssl.translated.zig");
/// BoringSSL's translated C API
pub const c = boring;
const std = @import("std");
const bun = @import("bun");
const c_ares = @import("./deps/c_ares.zig");
const strings = bun.strings;
const builtin = @import("builtin");
const X509 = @import("./bun.js/api/bun/x509.zig");
var loaded = false;
pub fn load() void {
if (loaded) return;
@@ -211,7 +203,6 @@ pub fn checkServerIdentity(
return false;
}
const JSC = bun.JSC;
pub fn ERR_toJS(globalThis: *JSC.JSGlobalObject, err_code: u32) JSC.JSValue {
var outbuf: [128 + 1 + "BoringSSL ".len]u8 = undefined;
@memset(&outbuf, 0);
@@ -227,3 +218,13 @@ pub fn ERR_toJS(globalThis: *JSC.JSGlobalObject, err_code: u32) JSC.JSValue {
return globalThis.ERR(.BORINGSSL, "{s}", .{error_message}).toJS();
}
const X509 = @import("./bun.js/api/bun/x509.zig");
const boring = @import("./deps/boringssl.translated.zig");
const builtin = @import("builtin");
const c_ares = @import("./deps/c_ares.zig");
const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;
const strings = bun.strings;

View File

@@ -1,11 +1,7 @@
const bun = @import("bun");
const std = @import("std");
pub const c = @import("./deps/brotli_c.zig");
const BrotliDecoder = c.BrotliDecoder;
const BrotliEncoder = c.BrotliEncoder;
const mimalloc = bun.Mimalloc;
pub const BrotliAllocator = struct {
pub fn alloc(_: ?*anyopaque, len: usize) callconv(.C) *anyopaque {
if (bun.heap_breakdown.enabled) {
@@ -282,3 +278,8 @@ pub const BrotliCompressionStream = struct {
return this.writerContext(writable).writer();
}
};
const std = @import("std");
const bun = @import("bun");
const mimalloc = bun.Mimalloc;

View File

@@ -1,6 +1,3 @@
const std = @import("std");
const bun = @import("bun");
extern const jsc_llint_begin: u8;
extern const jsc_llint_end: u8;
/// allocated using bun.default_allocator. when called from lldb, it is never freed.
@@ -261,3 +258,6 @@ fn printUnwindError(debug_info: *std.debug.SelfInfo, out_stream: anytype, addres
}
try tty_config.setColor(out_stream, .reset);
}
const bun = @import("bun");
const std = @import("std");

10
src/bun.exe.manifest Normal file
View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"
xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
<asmv3:application>
<asmv3:windowsSettings>
<longPathAware xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">true</longPathAware>
<heapType xmlns="http://schemas.microsoft.com/SMI/2020/WindowsSettings">SegmentHeap</heapType>
</asmv3:windowsSettings>
</asmv3:application>
</assembly>

View File

@@ -1,14 +1,3 @@
const bun = @import("bun");
const logger = bun.logger;
const std = @import("std");
const string = bun.string;
const Resolver = @import("../resolver//resolver.zig").Resolver;
const JSC = bun.JSC;
const JSGlobalObject = JSC.JSGlobalObject;
const default_allocator = bun.default_allocator;
const ZigString = JSC.ZigString;
const JSValue = JSC.JSValue;
pub const BuildMessage = struct {
pub const js = JSC.Codegen.JSBuildMessage;
pub const toJS = js.toJS;
@@ -197,3 +186,16 @@ pub const BuildMessage = struct {
this.msg.deinit(bun.default_allocator);
}
};
const std = @import("std");
const Resolver = @import("../resolver//resolver.zig").Resolver;
const bun = @import("bun");
const default_allocator = bun.default_allocator;
const logger = bun.logger;
const string = bun.string;
const JSC = bun.JSC;
const JSGlobalObject = JSC.JSGlobalObject;
const JSValue = JSC.JSValue;
const ZigString = JSC.ZigString;

View File

@@ -1,25 +1,6 @@
const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;
const Output = bun.Output;
const ConsoleObject = @This();
const String = bun.String;
const JSGlobalObject = JSC.JSGlobalObject;
const JSValue = JSC.JSValue;
const strings = bun.strings;
const ZigException = JSC.ZigException;
const ZigString = JSC.ZigString;
const VirtualMachine = JSC.VirtualMachine;
const string = bun.string;
const JSLexer = bun.js_lexer;
const ScriptArguments = opaque {};
const JSPrinter = bun.js_printer;
const Environment = bun.Environment;
const default_allocator = bun.default_allocator;
const JestPrettyFormat = @import("./test/pretty_format.zig").JestPrettyFormat;
const JSPromise = JSC.JSPromise;
const CLI = @import("../cli.zig").Command;
const EventType = JSC.EventType;
/// Default depth for console.log object inspection
/// Only --console-depth CLI flag and console.depth bunfig option should modify this
@@ -3747,3 +3728,26 @@ comptime {
@export(&recordEnd, .{ .name = "Bun__ConsoleObject__recordEnd" });
@export(&screenshot, .{ .name = "Bun__ConsoleObject__screenshot" });
}
const std = @import("std");
const CLI = @import("../cli.zig").Command;
const JestPrettyFormat = @import("./test/pretty_format.zig").JestPrettyFormat;
const bun = @import("bun");
const Environment = bun.Environment;
const JSLexer = bun.js_lexer;
const JSPrinter = bun.js_printer;
const Output = bun.Output;
const String = bun.String;
const default_allocator = bun.default_allocator;
const string = bun.string;
const strings = bun.strings;
const JSC = bun.JSC;
const EventType = JSC.EventType;
const JSGlobalObject = JSC.JSGlobalObject;
const JSPromise = JSC.JSPromise;
const JSValue = JSC.JSValue;
const VirtualMachine = JSC.VirtualMachine;
const ZigException = JSC.ZigException;
const ZigString = JSC.ZigString;

View File

@@ -1,3 +1,5 @@
const Counters = @This();
spawnSync_blocking: i32 = 0,
spawn_memfd: i32 = 0,
@@ -13,9 +15,9 @@ pub fn createCountersObject(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame
return globalObject.bunVM().counters.toJS(globalObject);
}
const Counters = @This();
const Field = std.meta.FieldEnum(Counters);
const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;

View File

@@ -427,12 +427,14 @@ pub const BunFrontendDevServerAgent = @import("./api/server/InspectorBunFrontend
pub const HTTPServerAgent = @import("./bindings/HTTPServerAgent.zig");
const std = @import("std");
const bun = @import("bun");
const uv = bun.windows.libuv;
const Output = bun.Output;
const Environment = bun.Environment;
const Output = bun.Output;
const uv = bun.windows.libuv;
const jsc = bun.jsc;
const VirtualMachine = jsc.VirtualMachine;
const ZigException = jsc.ZigException;
const Debugger = jsc.Debugger;
const JSGlobalObject = jsc.JSGlobalObject;
const VirtualMachine = jsc.VirtualMachine;
const ZigException = jsc.ZigException;

View File

@@ -3059,47 +3059,52 @@ export fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool {
return HardcodedModule.Alias.bun_aliases.get(str) != null;
}
const std = @import("std");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const debug = Output.scoped(.ModuleLoader, true);
const Fs = @import("../fs.zig");
const ast = @import("../import_record.zig");
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;
const ParseResult = bun.transpiler.ParseResult;
const logger = bun.logger;
const Api = @import("../api/schema.zig").Api;
const options = @import("../options.zig");
const Transpiler = bun.Transpiler;
const PluginRunner = bun.transpiler.PluginRunner;
const js_printer = bun.js_printer;
const js_ast = bun.JSAst;
const Analytics = @import("../analytics/analytics_thread.zig");
const ZigString = bun.JSC.ZigString;
const Runtime = @import("../runtime.zig");
const ImportRecord = ast.ImportRecord;
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const JSC = bun.JSC;
const JSValue = bun.JSC.JSValue;
const node_module_module = @import("./bindings/NodeModuleModule.zig");
const JSGlobalObject = bun.JSC.JSGlobalObject;
const ResolvedSource = bun.JSC.ResolvedSource;
const Bun = JSC.API.Bun;
const PackageManager = @import("../install/install.zig").PackageManager;
const Install = @import("../install/install.zig");
const VirtualMachine = bun.JSC.VirtualMachine;
const Dependency = @import("../install/dependency.zig");
const Async = bun.Async;
const String = bun.String;
const Fs = @import("../fs.zig");
const Runtime = @import("../runtime.zig");
const node_module_module = @import("./bindings/NodeModuleModule.zig");
const std = @import("std");
const Api = @import("../api/schema.zig").Api;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const panic = std.debug.panic;
const ast = @import("../import_record.zig");
const ImportRecord = ast.ImportRecord;
const Install = @import("../install/install.zig");
const PackageManager = @import("../install/install.zig").PackageManager;
const options = @import("../options.zig");
const ModuleType = options.ModuleType;
const debug = Output.scoped(.ModuleLoader, true);
const panic = std.debug.panic;
const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const bun = @import("bun");
const Async = bun.Async;
const Environment = bun.Environment;
const MutableString = bun.MutableString;
const Output = bun.Output;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const String = bun.String;
const Transpiler = bun.Transpiler;
const js_ast = bun.JSAst;
const js_printer = bun.js_printer;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const JSC = bun.JSC;
const JSGlobalObject = bun.JSC.JSGlobalObject;
const JSValue = bun.JSC.JSValue;
const ResolvedSource = bun.JSC.ResolvedSource;
const VirtualMachine = bun.JSC.VirtualMachine;
const ZigString = bun.JSC.ZigString;
const Bun = JSC.API.Bun;
const ParseResult = bun.transpiler.ParseResult;
const PluginRunner = bun.transpiler.PluginRunner;
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;

View File

@@ -1,7 +1,6 @@
const ProcessAutoKiller = @This();
const log = bun.Output.scoped(.AutoKiller, true);
const bun = @import("bun");
const std = @import("std");
processes: std.AutoArrayHashMapUnmanaged(*bun.spawn.Process, void) = .{},
enabled: bool = false,
@@ -83,3 +82,6 @@ pub fn deinit(this: *ProcessAutoKiller) void {
}
this.processes.deinit(bun.default_allocator);
}
const bun = @import("bun");
const std = @import("std");

View File

@@ -1,15 +1,3 @@
const bun = @import("bun");
const logger = bun.logger;
const std = @import("std");
const Fs = bun.fs;
const string = bun.string;
const Resolver = @import("../resolver//resolver.zig");
const JSC = bun.JSC;
const JSGlobalObject = JSC.JSGlobalObject;
const strings = bun.strings;
const default_allocator = bun.default_allocator;
const ZigString = JSC.ZigString;
pub const ResolveMessage = struct {
pub const js = JSC.Codegen.JSResolveMessage;
pub const toJS = js.toJS;
@@ -240,3 +228,17 @@ pub const ResolveMessage = struct {
this.msg.deinit(bun.default_allocator);
}
};
const Resolver = @import("../resolver//resolver.zig");
const std = @import("std");
const bun = @import("bun");
const Fs = bun.fs;
const default_allocator = bun.default_allocator;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const JSC = bun.JSC;
const JSGlobalObject = JSC.JSGlobalObject;
const ZigString = JSC.ZigString;

View File

@@ -14,10 +14,6 @@
/// Version 15: Updated global defines table list.
const expected_version = 15;
const bun = @import("bun");
const std = @import("std");
const Output = bun.Output;
const debug = Output.scoped(.cache, false);
const MINIMUM_CACHE_SIZE = 50 * 1024;
@@ -645,3 +641,8 @@ pub const RuntimeTranspilerCache = struct {
debug("put() = {d} bytes", .{output_code.latin1().len});
}
};
const std = @import("std");
const bun = @import("bun");
const Output = bun.Output;

Some files were not shown because too many files have changed in this diff Show More