Mirror of https://github.com/oven-sh/bun (synced 2026-02-04 16:08:53 +00:00)

Compare commits (3 commits): ciro/fix-a… ... claude/nod…
| Author | SHA1 | Date |
|---|---|---|
|  | 6954e63107 |  |
|  | a0ed374077 |  |
|  | 39a4c693f8 |  |
@@ -569,7 +569,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
       retry: getRetry(),
       cancel_on_build_failing: isMergeQueue(),
       parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
-      timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
+      timeout_in_minutes: profile === "asan" ? 45 : 30,
       command:
         os === "windows"
           ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
.github/workflows/format.yml (vendored, 3 changes)
@@ -46,8 +46,9 @@ jobs:
           version: 0.14.0
       - name: Zig Format
         run: |
           bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
-          zig fmt src
-          ./scripts/sort-imports.ts src
+          bun scripts/sortImports src
+          zig fmt src
       - name: Prettier Format
         run: |
.github/workflows/vscode-release.yml (vendored, 47 changes)
@@ -1,47 +0,0 @@ (entire file deleted)
name: VSCode Extension Publish
on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
        required: true
        type: string

jobs:
  publish:
    name: "Publish to VS Code Marketplace"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.2.18"

      - name: Install dependencies (root)
        run: bun install

      - name: Install dependencies
        run: bun install
        working-directory: packages/bun-vscode

      - name: Set Version
        run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
        working-directory: packages/bun-vscode

      - name: Build (inspector protocol)
        run: bun install && bun run build
        working-directory: packages/bun-inspector-protocol

      - name: Build (vscode extension)
        run: bun run build
        working-directory: packages/bun-vscode

      - name: Publish
        if: success()
        run: bunx vsce publish
        env:
          VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
        working-directory: packages/bun-vscode/extension
.vscode/settings.json (vendored, 2 changes)
@@ -168,5 +168,5 @@
   "WebKit/WebInspectorUI": true,
   },
   "git.detectSubmodules": false,
-  // "bun.test.customScript": "./build/debug/bun-debug test"
+  "bun.test.customScript": "bun-debug test"
 }
@@ -4,9 +4,9 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed...

 ### Build Commands

-- **Build debug version**: `bun bd`
+- **Build debug version**: `bun bd` or `bun run build:debug`
   - Creates a debug build at `./build/debug/bun-debug`
-  - Compilation takes ~5 minutes. Don't timeout, be patient.
+  - Compilation takes ~2.5 minutes
 - **Run tests with your debug build**: `bun bd test <test-file>`
   - **CRITICAL**: Never use `bun test` directly - it won't include your changes
 - **Run any command with debug build**: `bun bd <command>`
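A minimal sketch of the workflow those commands describe, using Bun's shell API; the test file path here is hypothetical, for illustration only:

```ts
import { $ } from "bun";

// Build (or incrementally rebuild) the debug binary at ./build/debug/bun-debug.
await $`bun bd`;

// Run a single test file against the debug build; never plain `bun test`,
// which would use the installed bun rather than your changes.
await $`bun bd test test/js/bun/util/example.test.ts`;
```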
@@ -160,7 +160,6 @@ In particular, these are:

 - `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h`, which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams, and other stream-related code work.
 - `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, the Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters, etc., which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig.
-- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
 - `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
 - `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
@@ -752,13 +752,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
         });
     }
 }
-{
-    const cppImport = b.createModule(.{
-        .root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "cpp.zig"),
-    });
-    mod.addImport("cpp", cppImport);
-    cppImport.addImport("bun", mod);
-}
 inline for (.{
     .{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
     .{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
bun.lock (10 changes)
@@ -4,8 +4,6 @@
   "": {
     "name": "bun",
     "devDependencies": {
-      "@lezer/common": "^1.2.3",
-      "@lezer/cpp": "^1.1.3",
       "esbuild": "^0.21.4",
       "mitata": "^0.1.11",
       "peechy": "0.4.34",
@@ -89,14 +87,6 @@

 "@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],

-"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],

-"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],

-"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],

-"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],

 "@types/bun": ["@types/bun@workspace:packages/@types/bun"],

 "@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],
@@ -7,6 +7,3 @@
 # Instead, we can only scan the test directory for Bun's runtime tests
 root = "test"
 preload = "./test/preload.ts"
-
-[install]
-linker = "isolated"
@@ -1,3 +1,4 @@
+src/bake/bake.bind.ts
 src/bake/bake.d.ts
 src/bake/bake.private.d.ts
 src/bake/bun-framework-react/index.ts
@@ -1,4 +1,4 @@
-src/bake.bind.ts
+src/bake/bake.bind.ts
 src/bake/DevServer.bind.ts
 src/bun.js/api/BunObject.bind.ts
 src/bun.js/bindgen_test.bind.ts
@@ -8,7 +8,6 @@ src/codegen/bundle-functions.ts
 src/codegen/bundle-modules.ts
 src/codegen/class-definitions.ts
 src/codegen/client-js.ts
-src/codegen/cppbind.ts
 src/codegen/create-hash-table.ts
 src/codegen/generate-classes.ts
 src/codegen/generate-compact-string-table.ts
@@ -18,4 +17,3 @@ src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/shared-types.ts
@@ -1,16 +1,15 @@
src/allocators.zig
src/allocators/AllocationScope.zig
src/allocators/basic.zig
src/allocators/LinuxMemFdAllocator.zig
src/allocators/MaxHeapAllocator.zig
src/allocators/linux_memfd_allocator.zig
src/allocators/max_heap_allocator.zig
src/allocators/memory_allocator.zig
src/allocators/MemoryReportingAllocator.zig
src/allocators/mimalloc_arena.zig
src/allocators/mimalloc.zig
src/allocators/MimallocArena.zig
src/allocators/NullableAllocator.zig
src/analytics.zig
src/analytics/schema.zig
src/analytics/analytics_schema.zig
src/analytics/analytics_thread.zig
src/api/schema.zig
src/ast.zig
src/ast/Ast.zig
src/ast/ASTMemoryAllocator.zig
src/ast/B.zig
@@ -34,30 +33,20 @@ src/ast/UseDirective.zig
src/async/posix_event_loop.zig
src/async/stub_event_loop.zig
src/async/windows_event_loop.zig
src/bake.zig
src/baby_list.zig
src/bake/bake.zig
src/bake/DevServer.zig
src/bake/DevServer/Assets.zig
src/bake/DevServer/DirectoryWatchStore.zig
src/bake/DevServer/ErrorReportRequest.zig
src/bake/DevServer/HmrSocket.zig
src/bake/DevServer/HotReloadEvent.zig
src/bake/DevServer/IncrementalGraph.zig
src/bake/DevServer/memory_cost.zig
src/bake/DevServer/PackedMap.zig
src/bake/DevServer/RouteBundle.zig
src/bake/DevServer/SerializedFailure.zig
src/bake/DevServer/SourceMapStore.zig
src/bake/DevServer/WatcherAtomics.zig
src/bake/FrameworkRouter.zig
src/bake/production.zig
src/base64/base64.zig
src/bit_set.zig
src/bits.zig
src/boringssl.zig
src/brotli.zig
src/btjs.zig
src/bun.js.zig
src/bun_js.zig
src/bun.js/api.zig
src/bun.js/api/bun/dns.zig
src/bun.js/api/bun/dns_resolver.zig
src/bun.js/api/bun/h2_frame_parser.zig
src/bun.js/api/bun/lshpack.zig
src/bun.js/api/bun/process.zig
@@ -107,7 +96,6 @@ src/bun.js/api/Timer/EventLoopTimer.zig
src/bun.js/api/Timer/ImmediateObject.zig
src/bun.js/api/Timer/TimeoutObject.zig
src/bun.js/api/Timer/TimerObjectInternals.zig
src/bun.js/api/Timer/WTFTimer.zig
src/bun.js/api/TOMLObject.zig
src/bun.js/api/UnsafeObject.zig
src/bun.js/bindgen_test.zig
@@ -254,6 +242,7 @@ src/bun.js/test/jest.zig
src/bun.js/test/pretty_format.zig
src/bun.js/test/snapshot.zig
src/bun.js/test/test.zig
src/bun.js/unbounded_queue.zig
src/bun.js/uuid.zig
src/bun.js/virtual_machine_exports.zig
src/bun.js/VirtualMachine.zig
@@ -292,6 +281,7 @@ src/bun.js/webcore/streams.zig
src/bun.js/webcore/TextDecoder.zig
src/bun.js/webcore/TextEncoder.zig
src/bun.js/webcore/TextEncoderStreamEncoder.zig
src/bun.js/WTFTimer.zig
src/bun.zig
src/bundler/AstBuilder.zig
src/bundler/bundle_v2.zig
@@ -315,14 +305,12 @@ src/bundler/linker_context/generateCodeForLazyExport.zig
src/bundler/linker_context/generateCompileResultForCssChunk.zig
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
src/bundler/linker_context/generateCompileResultForJSChunk.zig
src/bundler/linker_context/OutputFileListBuilder.zig
src/bundler/linker_context/postProcessCSSChunk.zig
src/bundler/linker_context/postProcessHTMLChunk.zig
src/bundler/linker_context/postProcessJSChunk.zig
src/bundler/linker_context/prepareCssAstsForChunk.zig
src/bundler/linker_context/renameSymbolsInChunk.zig
src/bundler/linker_context/scanImportsAndExports.zig
src/bundler/linker_context/StaticRouteVisitor.zig
src/bundler/linker_context/writeOutputFilesToDisk.zig
src/bundler/LinkerContext.zig
src/bundler/LinkerGraph.zig
@@ -350,6 +338,7 @@ src/cli/install_command.zig
src/cli/install_completions_command.zig
src/cli/link_command.zig
src/cli/list-of-yarn-commands.zig
src/cli/NodeVersionCommand.zig
src/cli/outdated_command.zig
src/cli/pack_command.zig
src/cli/package_manager_command.zig
@@ -372,11 +361,6 @@ src/cli/update_interactive_command.zig
src/cli/upgrade_command.zig
src/cli/why_command.zig
src/codegen/process_windows_translate_c.zig
src/collections.zig
src/collections/baby_list.zig
src/collections/bit_set.zig
src/collections/hive_array.zig
src/collections/multi_array_list.zig
src/compile_target.zig
src/comptime_string_map.zig
src/copy_file.zig
@@ -525,6 +509,7 @@ src/env.zig
src/errno/darwin_errno.zig
src/errno/linux_errno.zig
src/errno/windows_errno.zig
src/exact_size_matcher.zig
src/fd.zig
src/feature_flags.zig
src/fmt.zig
@@ -536,8 +521,10 @@ src/glob.zig
src/glob/GlobWalker.zig
src/glob/match.zig
src/Global.zig
src/grapheme.zig
src/heap_breakdown.zig
src/highway.zig
src/hive_array.zig
src/hmac.zig
src/HTMLScanner.zig
src/http.zig
@@ -632,10 +619,6 @@ src/install/resolvers/folder_resolver.zig
src/install/versioned_url.zig
src/install/windows-shim/BinLinkingShim.zig
src/install/windows-shim/bun_shim_impl.zig
src/interchange.zig
src/interchange/json.zig
src/interchange/toml.zig
src/interchange/toml/lexer.zig
src/io/heap.zig
src/io/io.zig
src/io/MaxBuf.zig
@@ -644,12 +627,14 @@ src/io/PipeReader.zig
src/io/pipes.zig
src/io/PipeWriter.zig
src/io/source.zig
src/js_ast.zig
src/js_lexer_tables.zig
src/js_lexer.zig
src/js_lexer/identifier.zig
src/js_parser.zig
src/js_printer.zig
src/jsc_stub.zig
src/json_parser.zig
src/libarchive/libarchive-bindings.zig
src/libarchive/libarchive.zig
src/linear_fifo.zig
@@ -661,6 +646,7 @@ src/main_test.zig
src/main_wasm.zig
src/main.zig
src/meta.zig
src/multi_array_list.zig
src/napi/napi.zig
src/node_fallbacks.zig
src/open.zig
@@ -835,36 +821,34 @@ src/sql/postgres/types/PostgresString.zig
src/sql/postgres/types/Tag.zig
src/StandaloneModuleGraph.zig
src/StaticHashMap.zig
src/string_immutable.zig
src/string_types.zig
src/string.zig
src/string/escapeHTML.zig
src/string/HashedString.zig
src/string/immutable.zig
src/string/immutable/escapeHTML.zig
src/string/immutable/exact_size_matcher.zig
src/string/immutable/grapheme.zig
src/string/immutable/paths.zig
src/string/immutable/unicode.zig
src/string/immutable/visible.zig
src/string/MutableString.zig
src/string/paths.zig
src/string/PathString.zig
src/string/SmolStr.zig
src/string/StringBuilder.zig
src/string/StringJoiner.zig
src/string/unicode.zig
src/string/visible.zig
src/string/WTFStringImpl.zig
src/sync.zig
src/sys_uv.zig
src/sys.zig
src/system_timer.zig
src/test/fixtures.zig
src/test/recover.zig
src/thread_pool.zig
src/threading.zig
src/threading/channel.zig
src/threading/Condition.zig
src/threading/Futex.zig
src/threading/guarded_value.zig
src/threading/Mutex.zig
src/threading/ThreadPool.zig
src/threading/unbounded_queue.zig
src/threading/WaitGroup.zig
src/tmp.zig
src/toml/toml_lexer.zig
src/toml/toml_parser.zig
src/tracy.zig
src/trait.zig
src/transpiler.zig
@@ -255,10 +255,6 @@ set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)

 absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)

-# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
-absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
-absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)

 set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
   ${CODEGEN_PATH}/ZigGeneratedClasses.h
   ${CODEGEN_PATH}/ZigGeneratedClasses.cpp
@@ -312,27 +308,6 @@ set(BUN_JAVASCRIPT_OUTPUTS
   ${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig
 )

-set(BUN_CPP_OUTPUTS
-  ${CODEGEN_PATH}/cpp.zig
-)
-
-register_command(
-  TARGET
-    bun-cppbind
-  COMMENT
-    "Generating C++ --> Zig bindings"
-  COMMAND
-    ${BUN_EXECUTABLE}
-    ${CWD}/src/codegen/cppbind.ts
-    ${CWD}/src
-    ${CODEGEN_PATH}
-  SOURCES
-    ${BUN_JAVASCRIPT_CODEGEN_SOURCES}
-    ${BUN_CXX_SOURCES}
-  OUTPUTS
-    ${BUN_CPP_OUTPUTS}
-)

 register_command(
   TARGET
     bun-js-modules
@@ -562,7 +537,6 @@ set(BUN_ZIG_GENERATED_SOURCES
   ${BUN_ERROR_CODE_OUTPUTS}
   ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
   ${BUN_JAVASCRIPT_OUTPUTS}
-  ${BUN_CPP_OUTPUTS}
 )

 # In debug builds, these are not embedded, but rather referenced at runtime.
@@ -632,7 +606,6 @@ register_command(
   TARGETS
     clone-zig
     clone-zstd
-    bun-cppbind
   SOURCES
     ${BUN_ZIG_SOURCES}
     ${BUN_ZIG_GENERATED_SOURCES}
@@ -645,6 +618,10 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")

 set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)

+# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
+absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
+absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)

 if(WIN32)
   list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
   list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
@@ -708,7 +685,7 @@ if(WIN32)
     ${CODEGEN_PATH}/windows-app-info.rc
     @ONLY
   )
-  set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc ${CWD}/src/bun.exe.manifest)
+  set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
 endif()

 # --- Executable ---
@@ -981,16 +958,6 @@ if(APPLE)
     -Wl,-map,${bun}.linker-map
   )

-  if(DEBUG)
-    target_link_options(${bun} PUBLIC
-      # Suppress ALL linker warnings on macOS.
-      # The intent is to only suppress linker alignment warnings.
-      # As of July 21st, 2025 there doesn't seem to be a more specific suppression just for linker alignment warnings.
-      # If you find one, please update this to only be for linker alignment.
-      -Wl,-w
-    )
-  endif()

 # don't strip in debug, this seems to be needed so that the Zig std library
 # `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
 # output executable
|
||||
REPOSITORY
|
||||
libarchive/libarchive
|
||||
COMMIT
|
||||
7118f97c26bf0b2f426728b482f86508efc81d02
|
||||
898dc8319355b7e985f68a9819f182aaed61b53a
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
@@ -20,14 +20,11 @@ register_cmake_command(
|
||||
-DENABLE_WERROR=OFF
|
||||
-DENABLE_BZip2=OFF
|
||||
-DENABLE_CAT=OFF
|
||||
-DENABLE_CPIO=OFF
|
||||
-DENABLE_UNZIP=OFF
|
||||
-DENABLE_EXPAT=OFF
|
||||
-DENABLE_ICONV=OFF
|
||||
-DENABLE_LIBB2=OFF
|
||||
-DENABLE_LibGCC=OFF
|
||||
-DENABLE_LIBXML2=OFF
|
||||
-DENABLE_WIN32_XMLLITE=OFF
|
||||
-DENABLE_LZ4=OFF
|
||||
-DENABLE_LZMA=OFF
|
||||
-DENABLE_LZO=OFF
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
   REPOSITORY
     cloudflare/lol-html
   COMMIT
-    d64457d9ff0143deef025d5df7e8586092b9afb7
+    67f1d4ffd6b74db7e053fb129dcce620193c180d
 )

 set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -20,7 +20,7 @@ else()
   unsupported(CMAKE_SYSTEM_NAME)
 endif()

-set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
+set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
 optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})

 if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -88,20 +88,6 @@ The order of the `--target` flag does not matter, as long as they're delimited by...

 On x64 platforms, Bun uses SIMD optimizations which require a modern CPU supporting AVX2 instructions. The `-baseline` build of Bun is for older CPUs that don't support these optimizations. Normally, when you install Bun, we automatically detect which version to use, but this can be harder to do when cross-compiling since you might not know the target CPU. You usually don't need to worry about it on Darwin x64, but it is relevant for Windows x64 and Linux x64. If you or your users see `"Illegal instruction"` errors, you might need to use the baseline version.

-## Build-time constants
-
-Use the `--define` flag to inject build-time constants into your executable, such as version numbers, build timestamps, or configuration values:
-
-```bash
-$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
-```
-
-These constants are embedded directly into your compiled binary at build time, providing zero runtime overhead and enabling dead code elimination optimizations.
-
-{% callout type="info" %}
-For comprehensive examples and advanced patterns, see the [Build-time constants guide](/guides/runtime/build-time-constants).
-{% /callout %}

 ## Deploying to production

 Compiled executables reduce memory usage and improve Bun's start time.
@@ -183,30 +183,6 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hosted...
 }
 ```

-## Installation strategies
-
-Bun supports two package installation strategies that determine how dependencies are organized in `node_modules`:
-
-### Hoisted installs (default for single projects)
-
-The traditional npm/Yarn approach that flattens dependencies into a shared `node_modules` directory:
-
-```bash
-$ bun install --linker hoisted
-```
-
-### Isolated installs
-
-A pnpm-like approach that creates strict dependency isolation to prevent phantom dependencies:
-
-```bash
-$ bun install --linker isolated
-```
-
-Isolated installs create a central package store in `node_modules/.bun/` with symlinks in the top-level `node_modules`. This ensures packages can only access their declared dependencies.
-
-For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).

 ## Configuration

 The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
@@ -237,15 +213,11 @@ dryRun = false

 # equivalent to `--concurrent-scripts` flag
 concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
-
-# installation strategy: "hoisted" or "isolated"
-# default: "hoisted"
-linker = "hoisted"
 ```

 ## CI/CD

-Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline:
+Looking to speed up your CI? Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline.

 ```yaml#.github/workflows/release.yml
 name: bun-types
@@ -264,31 +236,4 @@ jobs:
     run: bun run build
 ```

-For CI/CD environments that want to enforce reproducible builds, use `bun ci` to fail the build if the package.json is out of sync with the lockfile:
-
-```bash
-$ bun ci
-```
-
-This is equivalent to `bun install --frozen-lockfile`. It installs exact versions from `bun.lock` and fails if `package.json` doesn't match the lockfile. To use `bun ci` or `bun install --frozen-lockfile`, you must commit `bun.lock` to version control.
-
-And instead of running `bun install`, run `bun ci`.
-
-```yaml#.github/workflows/release.yml
-name: bun-types
-jobs:
-  build:
-    name: build-app
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v4
-      - name: Install bun
-        uses: oven-sh/setup-bun@v2
-      - name: Install dependencies
-        run: bun ci
-      - name: Build app
-        run: bun run build
-```

 {% bunCLIUsage command="install" /%}
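The `bun ci` equivalence described in that removed section is easy to check locally; a minimal sketch using Bun's shell API (illustrative only):

```ts
import { $ } from "bun";

// Both commands install exact versions from bun.lock and exit non-zero
// if package.json has drifted from the committed lockfile.
await $`bun ci`;
await $`bun install --frozen-lockfile`;
```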
@@ -258,7 +258,7 @@ Set any of the following environment variables to enable AI-friendly output:

 - `CLAUDECODE=1` - For Claude Code
 - `REPL_ID=1` - For Replit
-- `AGENT=1` - Generic AI agent flag
+- `IS_CODE_AGENT=1` - Generic AI agent flag

 ### Behavior

@@ -267,6 +267,7 @@ When an AI agent environment is detected:

 - Only test failures are displayed in detail
 - Passing, skipped, and todo test indicators are hidden
 - Summary statistics remain intact
+- JUnit XML reporting is preserved

 ```bash
 # Example: Enable quiet output for Claude Code
 ```
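A rough sketch of the detection this section describes; the flag names come from the list above (both generic spellings are included since the diff swaps one for the other), but the logic is illustrative, not Bun's actual implementation:

```ts
// Illustrative only: check the documented AI-agent environment variables.
const agentFlags = ["CLAUDECODE", "REPL_ID", "AGENT", "IS_CODE_AGENT"];
const isAiAgent = agentFlags.some(name => process.env[name] === "1");

if (isAiAgent) {
  console.log("AI-friendly test output: only failures will be shown in detail.");
}
```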
@@ -10,86 +10,6 @@ To update a specific dependency to the latest version:
 $ bun update [package]
 ```

-## `--interactive`
-
-For a more controlled update experience, use the `--interactive` flag to select which packages to update:
-
-```sh
-$ bun update --interactive
-$ bun update -i
-```
-
-This launches an interactive terminal interface that shows all outdated packages with their current and target versions. You can then select which packages to update.
-
-### Interactive Interface
-
-The interface displays packages grouped by dependency type:
-
-```
-? Select packages to update - Space to toggle, Enter to confirm, a to select all, n to select none, i to invert, l to toggle latest
-
-  dependencies            Current  Target   Latest
-□ react                   17.0.2   18.2.0   18.3.1
-□ lodash                  4.17.20  4.17.21  4.17.21
-
-  devDependencies         Current  Target   Latest
-□ typescript              4.8.0    5.0.0    5.3.3
-□ @types/node             16.11.7  18.0.0   20.11.5
-
-  optionalDependencies    Current  Target   Latest
-□ some-optional-package   1.0.0    1.1.0    1.2.0
-```
-
-**Sections:**
-
-- Packages are grouped under section headers: `dependencies`, `devDependencies`, `peerDependencies`, `optionalDependencies`
-- Each section shows column headers aligned with the package data
-
-**Columns:**
-
-- **Package**: Package name (may have suffix like ` dev`, ` peer`, ` optional` for clarity)
-- **Current**: Currently installed version
-- **Target**: Version that would be installed (respects semver constraints)
-- **Latest**: Latest available version
-
-### Keyboard Controls
-
-**Selection:**
-
-- **Space**: Toggle package selection
-- **Enter**: Confirm selections and update
-- **a/A**: Select all packages
-- **n/N**: Select none
-- **i/I**: Invert selection
-
-**Navigation:**
-
-- **↑/↓ Arrow keys** or **j/k**: Move cursor
-- **l/L**: Toggle between target and latest version for current package
-
-**Exit:**
-
-- **Ctrl+C** or **Ctrl+D**: Cancel without updating
-
-### Visual Indicators
-
-- **☑** Selected packages (will be updated)
-- **□** Unselected packages
-- **>** Current cursor position
-- **Colors**: Red (major), yellow (minor), green (patch) version changes
-- **Underlined**: Currently selected update target
-
-### Package Grouping
-
-Packages are organized in sections by dependency type:
-
-- **dependencies** - Regular runtime dependencies
-- **devDependencies** - Development dependencies
-- **peerDependencies** - Peer dependencies
-- **optionalDependencies** - Optional dependencies
-
-Within each section, individual packages may have additional suffixes (` dev`, ` peer`, ` optional`) for extra clarity.

 ## `--latest`

 By default, `bun update` will update to the latest version of a dependency that satisfies the version range specified in your `package.json`.
@@ -100,8 +20,6 @@ To update to the latest version, regardless of if it's compatible with the current range:
 $ bun update --latest
 ```

-In interactive mode, you can toggle individual packages between their target version (respecting semver) and latest version using the **l** key.
-
 For example, with the following `package.json`:

 ```json
@@ -1,293 +0,0 @@ (entire file deleted)
---
name: Build-time constants with --define
---

The `--define` flag can be used with `bun build` and `bun build --compile` to inject build-time constants into your application. This is especially useful for embedding metadata like build versions, timestamps, or configuration flags directly into your compiled executables.

```sh
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/index.ts --outfile myapp
```

---

## Why use build-time constants?

Build-time constants are embedded directly into your compiled code, making them:

- **Zero runtime overhead** - No environment variable lookups or file reads
- **Immutable** - Values are baked into the binary at compile time
- **Optimizable** - Dead code elimination can remove unused branches
- **Secure** - No external dependencies or configuration files to manage

This is similar to `gcc -D` or `#define` in C/C++, but for JavaScript/TypeScript.

---

## Basic usage

### With `bun build`

```sh
# Bundle with build-time constants
$ bun build --define BUILD_VERSION='"1.0.0"' --define NODE_ENV='"production"' src/index.ts --outdir ./dist
```

### With `bun build --compile`

```sh
# Compile to executable with build-time constants
$ bun build --compile --define BUILD_VERSION='"1.0.0"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
```

### JavaScript API

```ts
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  define: {
    BUILD_VERSION: '"1.0.0"',
    BUILD_TIME: '"2024-01-15T10:30:00Z"',
    DEBUG: "false",
  },
});
```

---

## Common use cases

### Version information

Embed version and build metadata directly into your executable:

{% codetabs %}

```ts#src/version.ts
// These constants are replaced at build time
declare const BUILD_VERSION: string;
declare const BUILD_TIME: string;
declare const GIT_COMMIT: string;

export function getVersion() {
  return {
    version: BUILD_VERSION,
    buildTime: BUILD_TIME,
    commit: GIT_COMMIT,
  };
}
```

```sh#Build command
$ bun build --compile \
  --define BUILD_VERSION='"1.2.3"' \
  --define BUILD_TIME='"2024-01-15T10:30:00Z"' \
  --define GIT_COMMIT='"abc123"' \
  src/cli.ts --outfile mycli
```

{% /codetabs %}

### Feature flags

Use build-time constants to enable/disable features:

```ts
// Replaced at build time
declare const ENABLE_ANALYTICS: boolean;
declare const ENABLE_DEBUG: boolean;

function trackEvent(event: string) {
  if (ENABLE_ANALYTICS) {
    // This entire block is removed if ENABLE_ANALYTICS is false
    console.log("Tracking:", event);
  }
}

if (ENABLE_DEBUG) {
  console.log("Debug mode enabled");
}
```

```sh
# Production build - analytics enabled, debug disabled
$ bun build --compile --define ENABLE_ANALYTICS=true --define ENABLE_DEBUG=false src/app.ts --outfile app-prod

# Development build - analytics disabled, debug enabled
$ bun build --compile --define ENABLE_ANALYTICS=false --define ENABLE_DEBUG=true src/app.ts --outfile app-dev
```
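To make the elimination concrete, here is roughly what the bundler could emit for the development build above once the constants are substituted and dead code is eliminated (illustrative output, not byte-exact):

```ts
// ENABLE_ANALYTICS was replaced with `false`, so the `if (false)` body is
// unreachable and drops out entirely; only an empty function remains.
function trackEvent(event: string) {}

// ENABLE_DEBUG was replaced with `true`, so the branch runs unconditionally.
console.log("Debug mode enabled");
```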
### Configuration

Replace configuration objects at build time:

```ts
declare const CONFIG: {
  apiUrl: string;
  timeout: number;
  retries: number;
};

// CONFIG is replaced with the actual object at build time
const response = await fetch(CONFIG.apiUrl, {
  timeout: CONFIG.timeout,
});
```

```sh
$ bun build --compile --define 'CONFIG={"apiUrl":"https://api.example.com","timeout":5000,"retries":3}' src/app.ts --outfile app
```

---

## Advanced patterns

### Environment-specific builds

Create different executables for different environments:

```json
{
  "scripts": {
    "build:dev": "bun build --compile --define NODE_ENV='\"development\"' --define API_URL='\"http://localhost:3000\"' src/app.ts --outfile app-dev",
    "build:staging": "bun build --compile --define NODE_ENV='\"staging\"' --define API_URL='\"https://staging.example.com\"' src/app.ts --outfile app-staging",
    "build:prod": "bun build --compile --define NODE_ENV='\"production\"' --define API_URL='\"https://api.example.com\"' src/app.ts --outfile app-prod"
  }
}
```

### Using shell commands for dynamic values

Generate build-time constants from shell commands:

```sh
# Use git to get current commit and timestamp
$ bun build --compile \
  --define BUILD_VERSION="\"$(git describe --tags --always)\"" \
  --define BUILD_TIME="\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"" \
  --define GIT_COMMIT="\"$(git rev-parse HEAD)\"" \
  src/cli.ts --outfile mycli
```

### Build automation script

Create a build script that automatically injects build metadata:

```ts
// build.ts
import { $ } from "bun";

const version = await $`git describe --tags --always`.text();
const buildTime = new Date().toISOString();
const gitCommit = await $`git rev-parse HEAD`.text();

await Bun.build({
  entrypoints: ["./src/cli.ts"],
  outdir: "./dist",
  define: {
    BUILD_VERSION: JSON.stringify(version.trim()),
    BUILD_TIME: JSON.stringify(buildTime),
    GIT_COMMIT: JSON.stringify(gitCommit.trim()),
  },
});

console.log(`Built with version ${version.trim()}`);
```

---

## Important considerations

### Value format

Values must be valid JSON that will be parsed and inlined as JavaScript expressions:

```sh
# ✅ Strings must be JSON-quoted
--define VERSION='"1.0.0"'

# ✅ Numbers are JSON literals
--define PORT=3000

# ✅ Booleans are JSON literals
--define DEBUG=true

# ✅ Objects and arrays (use single quotes to wrap the JSON)
--define 'CONFIG={"host":"localhost","port":3000}'

# ✅ Arrays work too
--define 'FEATURES=["auth","billing","analytics"]'

# ❌ This won't work - missing quotes around string
--define VERSION=1.0.0
```

### Property keys

You can use property access patterns as keys, not just simple identifiers:

```sh
# ✅ Replace process.env.NODE_ENV with "production"
--define 'process.env.NODE_ENV="production"'

# ✅ Replace process.env.API_KEY with the actual key
--define 'process.env.API_KEY="abc123"'

# ✅ Replace nested properties
--define 'window.myApp.version="1.0.0"'

# ✅ Replace array access
--define 'process.argv[2]="--production"'
```

This is particularly useful for environment variables:

```ts
// Before compilation
if (process.env.NODE_ENV === "production") {
  console.log("Production mode");
}

// After compilation with --define 'process.env.NODE_ENV="production"'
if ("production" === "production") {
  console.log("Production mode");
}

// After optimization
console.log("Production mode");
```

### TypeScript declarations

For TypeScript projects, declare your constants to avoid type errors:

```ts
// types/build-constants.d.ts
declare const BUILD_VERSION: string;
declare const BUILD_TIME: string;
declare const NODE_ENV: "development" | "staging" | "production";
declare const DEBUG: boolean;
```

### Cross-platform compatibility

When building for multiple platforms, constants work the same way:

```sh
# Linux
$ bun build --compile --target=bun-linux-x64 --define PLATFORM='"linux"' src/app.ts --outfile app-linux

# macOS
$ bun build --compile --target=bun-darwin-x64 --define PLATFORM='"darwin"' src/app.ts --outfile app-macos

# Windows
$ bun build --compile --target=bun-windows-x64 --define PLATFORM='"windows"' src/app.ts --outfile app-windows.exe
```

---

## Related

- [Define constants at runtime](/guides/runtime/define-constant) - Using `--define` with `bun run`
- [Building executables](/bundler/executables) - Complete guide to `bun build --compile`
- [Bundler API](/bundler) - Full bundler documentation including `define` option
@@ -81,14 +81,6 @@ $ bun install --verbose # debug logging
 $ bun install --silent  # no logging
 ```

-To use isolated installs instead of the default hoisted strategy:
-
-```bash
-$ bun install --linker isolated
-```
-
-Isolated installs create strict dependency isolation similar to pnpm, preventing phantom dependencies and ensuring more deterministic builds. For complete documentation, see [Isolated installs](https://bun.com/docs/install/isolated).

 {% details summary="Configuring behavior" %}
 The default behavior of `bun install` can be configured in `bunfig.toml`:

@@ -118,10 +110,6 @@ dryRun = false

 # equivalent to `--concurrent-scripts` flag
 concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
-
-# installation strategy: "hoisted" or "isolated"
-# default: "hoisted"
-linker = "hoisted"
 ```

 {% /details %}
@@ -1,195 +0,0 @@ (entire file deleted)
Bun provides an alternative package installation strategy called **isolated installs** that creates strict dependency isolation similar to pnpm's approach. This mode prevents phantom dependencies and ensures reproducible, deterministic builds.

## What are isolated installs?

Isolated installs create a non-hoisted dependency structure where packages can only access their explicitly declared dependencies. This differs from the traditional "hoisted" installation strategy used by npm and Yarn, where dependencies are flattened into a shared `node_modules` directory.

### Key benefits

- **Prevents phantom dependencies** — Packages cannot accidentally import dependencies they haven't declared
- **Deterministic resolution** — Same dependency tree regardless of what else is installed
- **Better for monorepos** — Workspace isolation prevents cross-contamination between packages
- **Reproducible builds** — More predictable resolution behavior across environments

## Using isolated installs

### Command line

Use the `--linker` flag to specify the installation strategy:

```bash
# Use isolated installs
$ bun install --linker isolated

# Use traditional hoisted installs
$ bun install --linker hoisted
```

### Configuration file

Set the default linker strategy in your `bunfig.toml`:

```toml
[install]
linker = "isolated"
```

### Default behavior

By default, Bun uses the **hoisted** installation strategy for all projects. To use isolated installs, you must explicitly specify the `--linker isolated` flag or set it in your configuration file.

## How isolated installs work

### Directory structure

Instead of hoisting dependencies, isolated installs create a two-tier structure:

```
node_modules/
├── .bun/                      # Central package store
│   ├── package@1.0.0/         # Versioned package installations
│   │   └── node_modules/
│   │       └── package/       # Actual package files
│   ├── @scope+package@2.1.0/  # Scoped packages (+ replaces /)
│   │   └── node_modules/
│   │       └── @scope/
│   │           └── package/
│   └── ...
└── package-name -> .bun/package@1.0.0/node_modules/package  # Symlinks
```

### Resolution algorithm

1. **Central store** — All packages are installed in `node_modules/.bun/package@version/` directories
2. **Symlinks** — Top-level `node_modules` contains symlinks pointing to the central store
3. **Peer resolution** — Complex peer dependencies create specialized directory names
4. **Deduplication** — Packages with identical package IDs and peer dependency sets are shared
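A small sketch that makes the symlink layer from that algorithm visible; run it from a project installed with the isolated linker (illustrative only, output shape will vary):

```ts
import { readdirSync, readlinkSync } from "node:fs";
import { join } from "node:path";

// With the isolated linker, top-level node_modules entries should be
// symlinks pointing into the node_modules/.bun/ central store.
for (const entry of readdirSync("node_modules", { withFileTypes: true })) {
  if (entry.isSymbolicLink()) {
    console.log(entry.name, "->", readlinkSync(join("node_modules", entry.name)));
  }
}
```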
### Workspace handling

In monorepos, workspace dependencies are handled specially:

- **Workspace packages** — Symlinked directly to their source directories, not the store
- **Workspace dependencies** — Can access other workspace packages in the monorepo
- **External dependencies** — Installed in the isolated store with proper isolation

## Comparison with hoisted installs

| Aspect | Hoisted (npm/Yarn) | Isolated (pnpm-like) |
| --- | --- | --- |
| **Dependency access** | Packages can access any hoisted dependency | Packages only see declared dependencies |
| **Phantom dependencies** | ❌ Possible | ✅ Prevented |
| **Disk usage** | ✅ Lower (shared installs) | ✅ Similar (uses symlinks) |
| **Determinism** | ❌ Less deterministic | ✅ More deterministic |
| **Node.js compatibility** | ✅ Standard behavior | ✅ Compatible via symlinks |
| **Best for** | Single projects, legacy code | Monorepos, strict dependency management |

## Advanced features

### Peer dependency handling

Isolated installs handle peer dependencies through sophisticated resolution:

```bash
# Package with peer dependencies creates specialized paths
node_modules/.bun/package@1.0.0_react@18.2.0/
```

The directory name encodes both the package version and its peer dependency versions, ensuring each unique combination gets its own installation.

### Backend strategies

Bun uses different file operation strategies for performance:

- **Clonefile** (macOS) — Copy-on-write filesystem clones for maximum efficiency
- **Hardlink** (Linux/Windows) — Hardlinks to save disk space
- **Copyfile** (fallback) — Full file copies when other methods aren't available

### Debugging isolated installs

Enable verbose logging to understand the installation process:

```bash
$ bun install --linker isolated --verbose
```

This shows:

- Store entry creation
- Symlink operations
- Peer dependency resolution
- Deduplication decisions

## Troubleshooting

### Compatibility issues

Some packages may not work correctly with isolated installs due to:

- **Hardcoded paths** — Packages that assume a flat `node_modules` structure
- **Dynamic imports** — Runtime imports that don't follow Node.js resolution
- **Build tools** — Tools that scan `node_modules` directly

If you encounter issues, you can:

1. **Switch to hoisted mode** for specific projects:

   ```bash
   $ bun install --linker hoisted
   ```

2. **Report compatibility issues** to help improve isolated install support

### Performance considerations

- **Install time** — May be slightly slower due to symlink operations
- **Disk usage** — Similar to hoisted (uses symlinks, not file copies)
- **Memory usage** — Higher during install due to complex peer resolution

## Migration guide

### From npm/Yarn

```bash
# Remove existing node_modules and lockfiles
$ rm -rf node_modules package-lock.json yarn.lock

# Install with isolated linker
$ bun install --linker isolated
```

### From pnpm

Isolated installs are conceptually similar to pnpm, so migration should be straightforward:

```bash
# Remove pnpm files
$ rm -rf node_modules pnpm-lock.yaml

# Install with Bun's isolated linker
$ bun install --linker isolated
```

The main difference is that Bun uses symlinks in `node_modules` while pnpm uses a global store with symlinks.

## When to use isolated installs

**Use isolated installs when:**

- Working in monorepos with multiple packages
- Strict dependency management is required
- Preventing phantom dependencies is important
- Building libraries that need deterministic dependencies

**Use hoisted installs when:**

- Working with legacy code that assumes flat `node_modules`
- Compatibility with existing build tools is required
- Working in environments where symlinks aren't well supported
- You prefer the simpler traditional npm behavior

## Related documentation

- [Package manager > Workspaces](https://bun.com/docs/install/workspaces) — Monorepo workspace management
- [Package manager > Lockfile](https://bun.com/docs/install/lockfile) — Understanding Bun's lockfile format
- [CLI > install](https://bun.com/docs/cli/install) — Complete `bun install` command reference
@@ -183,9 +183,6 @@ export default {
       description:
         "Bun's package manager installs all packages into a shared global cache to avoid redundant re-downloads.",
     }),
-    page("install/isolated", "Isolated installs", {
-      description: "Create strict dependency isolation, preventing phantom dependencies.",
-    }),
     page("install/workspaces", "Workspaces", {
       description: "Bun's package manager supports workspaces and monorepo development workflows.",
     }),
@@ -20,7 +20,7 @@ this one:

 Given a file implementing a simple function, such as `add`

 ```zig#src/bun.js/math.zig
-pub fn add(global: *jsc.JSGlobalObject, a: i32, b: i32) !i32 {
+pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
     return std.math.add(i32, a, b) catch {
         // Binding functions can return `error.OutOfMemory` and `error.JSError`.
         // Others like `error.Overflow` from `std.math.add` must be converted.
@@ -33,7 +33,7 @@ const gen = bun.gen.math; // "math" being this file's basename

 const std = @import("std");
 const bun = @import("bun");
-const jsc = bun.jsc;
+const JSC = bun.JSC;
 ```

 Then describe the API schema in a `.bind.ts` file. The binding file goes next to the Zig file.
@@ -2,15 +2,16 @@ const std = @import("std");

 const path_handler = @import("../src/resolver/resolve_path.zig");
 const bun = @import("bun");
-const string = []const u8;
+const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
 const Environment = bun.Environment;
 const strings = bun.strings;
 const MutableString = bun.MutableString;
-const stringZ = [:0]const u8;
+const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
-const Features = bun.analytics.Features;
 const C = bun.C;
+const Features = @import("../src/analytics/analytics_thread.zig").Features;

 // zig run --main-pkg-path ../ ./features.zig
 pub fn main() anyerror!void {
@@ -1,13 +1,14 @@
 const std = @import("std");
 const bun = @import("bun");
-const string = []const u8;
+const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
 const Environment = bun.Environment;
 const strings = bun.strings;
 const MutableString = bun.MutableString;
-const stringZ = [:0]const u8;
+const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
 const C = bun.C;
 const clap = @import("../src/deps/zig-clap/clap.zig");

 const URL = @import("../src/url.zig").URL;
@@ -195,7 +196,7 @@ pub fn main() anyerror!void {
     response_body: MutableString = undefined,
     context: HTTP.HTTPChannelContext = undefined,
 };
-const Batch = bun.ThreadPool.Batch;
+const Batch = @import("../src/thread_pool.zig").Batch;
 var groups = try default_allocator.alloc(Group, args.count);
 var repeat_i: usize = 0;
 while (repeat_i < args.repeat + 1) : (repeat_i += 1) {
@@ -1,14 +1,15 @@
 // most of this file is copy pasted from other files in misctools
 const std = @import("std");
 const bun = @import("bun");
-const string = []const u8;
+const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
 const Environment = bun.Environment;
 const strings = bun.strings;
 const MutableString = bun.MutableString;
-const stringZ = [:0]const u8;
+const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
 const C = bun.C;
 const clap = @import("../src/deps/zig-clap/clap.zig");

 const URL = @import("../src/url.zig").URL;
@@ -2,14 +2,15 @@ const std = @import("std");

 const path_handler = @import("../src/resolver/resolve_path.zig");
 const bun = @import("bun");
-const string = []const u8;
+const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
 const Environment = bun.Environment;
 const strings = bun.strings;
 const MutableString = bun.MutableString;
-const stringZ = [:0]const u8;
+const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
 const C = bun.C;

 // zig build-exe -Doptimize=ReleaseFast --main-pkg-path ../ ./readlink-getfd.zig
 pub fn main() anyerror!void {
@@ -2,14 +2,15 @@ const std = @import("std");

 const path_handler = @import("../src/resolver/resolve_path.zig");
 const bun = @import("bun");
-const string = []const u8;
+const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
 const Environment = bun.Environment;
 const strings = bun.strings;
 const MutableString = bun.MutableString;
-const stringZ = [:0]const u8;
+const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
 const C = bun.C;

 // zig build-exe -Doptimize=ReleaseFast --main-pkg-path ../ ./readlink-getfd.zig
 pub fn main() anyerror!void {
@@ -2,14 +2,15 @@ const std = @import("std");

 const path_handler = @import("../src/resolver/resolve_path.zig");
 const bun = @import("bun");
-const string = []const u8;
+const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
 const Environment = bun.Environment;
 const strings = bun.strings;
 const MutableString = bun.MutableString;
-const stringZ = [:0]const u8;
+const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
 const C = bun.C;

 const Archive = @import("../src/libarchive/libarchive.zig").Archive;
 const Zlib = @import("../src/zlib.zig");
@@ -1,14 +1,12 @@
 {
   "private": true,
   "name": "bun",
-  "version": "1.2.20",
+  "version": "1.2.19",
   "workspaces": [
     "./packages/bun-types",
     "./packages/@types/bun"
   ],
   "devDependencies": {
-    "@lezer/common": "^1.2.3",
-    "@lezer/cpp": "^1.1.3",
     "esbuild": "^0.21.4",
     "mitata": "^0.1.11",
     "peechy": "0.4.34",
@@ -30,8 +28,8 @@
     "watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
     "bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
     "bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
-    "build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
-    "build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
+    "build:debug": "bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
+    "build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
     "build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
     "build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
     "build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert",
@@ -27,17 +27,11 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
- Run scripts from package.json
- Visual lockfile viewer for old binary lockfiles (`bun.lockb`)

## Bun test runner integration

Run and debug tests directly from VSCode's Testing panel. The extension automatically discovers test files, shows inline test status, and provides rich error messages with diffs.



## In-editor error messages

When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.




<div align="center">
<sup>In the example above, VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>
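As an illustrative aside (not part of the diff; the file name and error below are made up for the example), the error-reporting flow the README describes can be tried with a file like this, run from VSCode's integrated terminal with `bun --watch example.ts`:

```ts
// example.ts — hypothetical file for trying in-editor error reporting.
// Run from VSCode's integrated terminal: bun --watch example.ts
function parsePort(raw: string): number {
  const port = Number(raw);
  if (!Number.isInteger(port)) {
    // On save, this throw is what the extension reports at this exact line.
    throw new Error(`invalid port: ${raw}`);
  }
  return port;
}

console.log(parsePort("not-a-number"));
```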
Binary file not shown. (Before: 6.7 MiB)

@@ -102,6 +102,8 @@

"@types/ws": ["@types/ws@8.5.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ=="],

"@types/xml2js": ["@types/xml2js@0.4.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ=="],

"@vscode/debugadapter": ["@vscode/debugadapter@1.61.0", "", { "dependencies": { "@vscode/debugprotocol": "1.61.0" } }, "sha512-VDGLUFDVAdnftUebZe4uQCIFUbJ7rTc2Grps4D/CXl+qyzTZSQLv5VADEOZ6kBYG4SvlnMLql5vPQ0G6XvUCvQ=="],

"@vscode/debugadapter-testsupport": ["@vscode/debugadapter-testsupport@1.61.0", "", { "dependencies": { "@vscode/debugprotocol": "1.61.0" } }, "sha512-M/8aNX1aFvupd+SP0NLEVLKUK9y52BuCK5vKO2gzdpSoRUR2fR8oFbGkTie+/p2Yrcswnuf7hFx0xWkV9avRdg=="],

Binary image changed (Before: 462 KiB, After: 462 KiB).
@@ -10,13 +10,15 @@
  "devDependencies": {
    "@types/bun": "^1.1.10",
    "@types/vscode": "^1.60.0",
    "@types/xml2js": "^0.4.14",
    "@vscode/debugadapter": "^1.56.0",
    "@vscode/debugadapter-testsupport": "^1.56.0",
    "@vscode/test-cli": "^0.0.10",
    "@vscode/test-electron": "^2.4.1",
    "@vscode/vsce": "^2.20.1",
    "esbuild": "^0.19.2",
    "typescript": "^5.0.0"
    "typescript": "^5.0.0",
    "xml2js": "^0.6.2"
  },
  "activationEvents": [
    "onStartupFinished"
@@ -71,7 +73,7 @@
      },
      "bun.test.filePattern": {
        "type": "string",
        "default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts,cjs,mjs}",
        "default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
        "description": "Glob pattern to find test files"
      },
      "bun.test.customFlag": {
@@ -81,14 +83,8 @@
      },
      "bun.test.customScript": {
        "type": "string",
        "default": "bun test",
        "default": "",
        "description": "Custom script to use instead of `bun test`, for example script from `package.json`"
      },
      "bun.test.enable": {
        "type": "boolean",
        "description": "If the test explorer should be enabled and integrated with your editor",
        "scope": "window",
        "default": true
      }
    }
  },

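For reference (an illustrative sketch, not part of the diff): a file whose name matches the default `bun.test.filePattern` glob above — e.g. `math.test.ts` — is the kind of file the test explorer discovers:

```ts
// math.test.ts — the ".test.ts" suffix matches the default filePattern glob.
import { describe, expect, test } from "bun:test";

describe("math", () => {
  test("adds", () => {
    expect(1 + 1).toBe(2);
  });
});
```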
@@ -1,864 +0,0 @@
import { describe, expect, test } from "bun:test";
import { MockTestController, MockWorkspaceFolder } from "./vscode-types.mock";
import "./vscode.mock";
import { makeTestController, makeWorkspaceFolder } from "./vscode.mock";

const { BunTestController } = await import("../bun-test-controller");

const mockTestController: MockTestController = makeTestController();
const mockWorkspaceFolder: MockWorkspaceFolder = makeWorkspaceFolder("/test/workspace");

const controller = new BunTestController(mockTestController, mockWorkspaceFolder, true);
const internal = controller._internal;

const { expandEachTests, parseTestBlocks, getBraceDepth } = internal;

describe("BunTestController (static file parser)", () => {
  describe("expandEachTests", () => {
    describe("$variable syntax", () => {
      test("should not expand $variable patterns (Bun behavior)", () => {
        const content = `test.each([
          { a: 1, b: 2, expected: 3 },
          { a: 5, b: 5, expected: 10 }
        ])('$a + $b = $expected', ({ a, b, expected }) => {})`;

        const result = expandEachTests("test.each([", "$a + $b = $expected", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$a + $b = $expected");
      });

      test("should not expand string values with quotes", () => {
        const content = `test.each([
          { name: "Alice", city: "NYC" },
          { name: "Bob", city: "LA" }
        ])('$name from $city', ({ name, city }) => {})`;

        const result = expandEachTests("test.each([", "$name from $city", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$name from $city");
      });

      test("should not expand nested property access", () => {
        const content = `test.each([
          { user: { name: "Alice", profile: { city: "NYC" } } },
          { user: { name: "Bob", profile: { city: "LA" } } }
        ])('$user.name from $user.profile.city', ({ user }) => {})`;

        const result = expandEachTests("test.each([", "$user.name from $user.profile.city", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$user.name from $user.profile.city");
      });

      test("should not expand array indexing", () => {
        const content = `test.each([
          { users: [{ name: "Alice" }, { name: "Bob" }] },
          { users: [{ name: "Carol" }, { name: "Dave" }] }
        ])('first user: $users.0.name', ({ users }) => {})`;

        const result = expandEachTests("test.each([", "first user: $users.0.name", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("first user: $users.0.name");
      });

      test("should return template as-is for missing properties", () => {
        const content = `test.each([
          { a: 1 },
          { a: 2 }
        ])('$a and $missing', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "$a and $missing", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$a and $missing");
      });

      test("should handle edge cases with special identifiers", () => {
        const content = `test.each([
          { _valid: "ok", $dollar: "yes", _123mix: "mixed" }
        ])('$_valid | $$dollar | $_123mix', (obj) => {})`;

        const result = expandEachTests("test.each([", "$_valid | $$dollar | $_123mix", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$_valid | $$dollar | $_123mix");
      });

      test("should handle invalid identifiers as literals", () => {
        const content = `test.each([
          { valid: "test" }
        ])('$valid | $123invalid | $has-dash', (obj) => {})`;

        const result = expandEachTests("test.each([", "$valid | $123invalid | $has-dash", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$valid | $123invalid | $has-dash");
      });
    });

    describe("% formatters", () => {
      test("should handle %i for integers", () => {
        const content = `test.each([
          [1, 2, 3],
          [5, 5, 10]
        ])('%i + %i = %i', (a, b, expected) => {})`;

        const result = expandEachTests("test.each([", "%i + %i = %i", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("1 + 2 = 3");
        expect(result[1].name).toBe("5 + 5 = 10");
      });

      test("should handle %s for strings", () => {
        const content = `test.each([
          ["hello", "world"],
          ["foo", "bar"]
        ])('%s %s', (a, b) => {})`;

        const result = expandEachTests("test.each([", "%s %s", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("hello world");
        expect(result[1].name).toBe("foo bar");
      });

      test("should handle %f and %d for numbers", () => {
        const content = `test.each([
          [1.5, 2.7],
          [3.14, 2.71]
        ])('%f and %d', (a, b) => {})`;

        const result = expandEachTests("test.each([", "%f and %d", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("1.5 and 2.7");
        expect(result[1].name).toBe("3.14 and 2.71");
      });

      test("should handle %o and %j for objects", () => {
        const content = `test.each([
          [{ a: 1 }, { b: 2 }]
        ])('%o and %j', (obj1, obj2) => {})`;

        const result = expandEachTests("test.each([", "%o and %j", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("%o and %j");
      });

      test("should handle %# for index", () => {
        const content = `test.each([
          [1, 2],
          [3, 4],
          [5, 6]
        ])('Test #%#: %i + %i', (a, b) => {})`;

        const result = expandEachTests("test.each([", "Test #%#: %i + %i", content, 0, "test", 1);

        expect(result).toHaveLength(3);
        expect(result[0].name).toBe("Test #1: 1 + 2");
        expect(result[1].name).toBe("Test #2: 3 + 4");
        expect(result[2].name).toBe("Test #3: 5 + 6");
      });

      test("should handle %% for literal percent", () => {
        const content = `test.each([
          [50],
          [100]
        ])('%i%% complete', (percent) => {})`;

        const result = expandEachTests("test.each([", "%i%% complete", content, 0, "test", 1);

        expect(result).toHaveLength(2);
        expect(result[0].name).toBe("50% complete");
        expect(result[1].name).toBe("100% complete");
      });
    });
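The formatter tests above pin down a small printf-style substitution. As an illustrative sketch consistent with those expectations (assumed names; not the extension's actual code), the expansion of one array case could look like:

```ts
// Illustrative only. Covers the subset exercised by the tests: %i/%s/%f/%d
// take positional case values, %# is the 1-based case index, %% is a literal
// percent sign; unknown tokens like %o/%j are left untouched.
function formatEachTitle(template: string, values: unknown[], index: number): string {
  let argIdx = 0;
  return template.replace(/%%|%[#isfd]/g, token => {
    if (token === "%%") return "%";
    if (token === "%#") return String(index + 1);
    return String(values[argIdx++]);
  });
}

// formatEachTitle("Test #%#: %i + %i", [1, 2], 0) === "Test #1: 1 + 2"
// formatEachTitle("%i%% complete", [50], 0)       === "50% complete"
```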

    describe("describe.each", () => {
      test("should work with describe.each", () => {
        const content = `describe.each([
          { module: "fs", method: "readFile" },
          { module: "path", method: "join" }
        ])('$module module', ({ module, method }) => {})`;

        const result = expandEachTests("describe.each([", "$module module", content, 0, "describe", 1);
        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$module module");
        expect(result[0].type).toBe("describe");
      });
    });

    describe("error handling", () => {
      test("should handle non-.each tests", () => {
        const result = expandEachTests("test", "regular test", "test('regular test', () => {})", 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("regular test");
      });

      test("should handle malformed JSON", () => {
        const content = `test.each([
          { invalid json }
        ])('test', () => {})`;

        const result = expandEachTests("test.each([", "test", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test");
      });

      test("should handle non-array values", () => {
        const content = `test.each({ not: "array" })('test', () => {})`;

        const result = expandEachTests("test.each([", "test", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test");
      });
    });

    describe("mixed formatters", () => {
      test("should handle both $ and % in objects", () => {
        const content = `test.each([
          { name: "Test", index: 0 }
        ])('$name #%#', (obj) => {})`;

        const result = expandEachTests("test.each([", "$name #%#", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$name #%#");
      });
    });

    describe("edge cases", () => {
      test("should handle complex nested objects", () => {
        const content = `test.each([
          {
            user: {
              profile: {
                address: {
                  city: "NYC",
                  coords: { lat: 40.7128, lng: -74.0060 }
                }
              }
            }
          }
        ])('User from $user.profile.address.city at $user.profile.address.coords.lat', ({ user }) => {})`;

        const result = expandEachTests(
          "test.each([",
          "User from $user.profile.address.city at $user.profile.address.coords.lat",
          content,
          0,
          "test",
          1,
        );

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("User from $user.profile.address.city at $user.profile.address.coords.lat");
      });

      test("should handle arrays with inline comments", () => {
        const content = `test.each([
          { a: 1 }, // first test
          { a: 2 }, // second test
          // { a: 3 }, // commented out test
          { a: 4 } /* final test */
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test $a");
      });

      test("should handle arrays with multiline comments", () => {
        const content = `test.each([
          { name: "test1" },
          /* This is a
             multiline comment
             that spans several lines */
          { name: "test2" },
          /**
           * JSDoc style comment
           * with multiple lines
           */
          { name: "test3" }
        ])('$name', ({ name }) => {})`;

        const result = expandEachTests("test.each([", "$name", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("$name");
      });

      test("should handle malformed array syntax gracefully", () => {
        const content = `test.each([
          { a: 1 },
          { a: 2,,, }, // extra commas
          { a: 3, }, // trailing comma
          { a: 4 },,, // extra trailing commas
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result.length).toBeGreaterThanOrEqual(1);
      });

      test("should handle strings with comment-like content", () => {
        const content = `test.each([
          { comment: "// this is not a comment" },
          { comment: "/* neither is this */" },
          { url: "https://example.com/path" }
        ])('Test: $comment $url', (data) => {})`;

        const result = expandEachTests("test.each([", "Test: $comment $url", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Test: $comment $url");
      });

      test("should handle special characters in strings", () => {
        const content = `test.each([
          { char: "\\n" },
          { char: "\\t" },
          { char: "\\"" },
          { char: "\\'" },
          { char: "\\\\" },
          { char: "\`" }
        ])('Special char: $char', ({ char }) => {})`;

        const result = expandEachTests("test.each([", "Special char: $char", content, 0, "test", 1);

        expect(result.length).toBeGreaterThanOrEqual(1);
      });

      test("should handle empty arrays", () => {
        const content = `test.each([])('should handle empty', () => {})`;

        const result = expandEachTests("test.each([", "should handle empty", content, 0, "test", 1);

        expect(result).toHaveLength(0);
      });

      test("should handle undefined and null values", () => {
        const content = `test.each([
          { value: undefined },
          { value: null },
          { value: false },
          { value: 0 },
          { value: "" }
        ])('Value: $value', ({ value }) => {})`;

        const result = expandEachTests("test.each([", "Value: $value", content, 0, "test", 1);

        if (result.length === 1) {
          expect(result[0].name).toBe("Value: $value");
        } else {
          expect(result).toHaveLength(5);
          expect(result[0].name).toBe("Value: undefined");
          expect(result[1].name).toBe("Value: null");
          expect(result[2].name).toBe("Value: false");
          expect(result[3].name).toBe("Value: 0");
          expect(result[4].name).toBe("Value: ");
        }
      });

      test("should handle circular references gracefully", () => {
        const content = `test.each([
          { a: { b: "[Circular]" } },
          { a: { b: { c: "[Circular]" } } }
        ])('Circular: $a.b', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "Circular: $a.b", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Circular: $a.b");
      });

      test("should handle very long property paths", () => {
        const content = `test.each([
          {
            a: {
              b: {
                c: {
                  d: {
                    e: {
                      f: {
                        g: "deeply nested"
                      }
                    }
                  }
                }
              }
            }
          }
        ])('Value: $a.b.c.d.e.f.g', (data) => {})`;

        const result = expandEachTests("test.each([", "Value: $a.b.c.d.e.f.g", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Value: $a.b.c.d.e.f.g");
      });

      test("should handle syntax errors in array", () => {
        const content = `test.each([
          { a: 1 }
          { a: 2 } // missing comma
          { a: 3 }
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test $a");
      });

      test("should handle arrays with trailing commas", () => {
        const content = `test.each([
          { a: 1 },
          { a: 2 },
        ])('test $a', ({ a }) => {})`;

        const result = expandEachTests("test.each([", "test $a", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test $a");
      });

      test("should handle mixed data types in arrays", () => {
        const content = `test.each([
          ["string", 123, true, null, undefined],
          [{ obj: true }, [1, 2, 3], new Date("2024-01-01")]
        ])('test %s %i %s %s %s', (...args) => {})`;

        const result = expandEachTests("test.each([", "test %s %i %s %s %s", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("test %s %i %s %s %s");
      });

      test("should handle regex-like strings", () => {
        const content = `test.each([
          { pattern: "/^test.*$/" },
          { pattern: "\\\\d{3}-\\\\d{4}" },
          { pattern: "[a-zA-Z]+" }
        ])('Pattern: $pattern', ({ pattern }) => {})`;

        const result = expandEachTests("test.each([", "Pattern: $pattern", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Pattern: $pattern");
      });

      test("should handle invalid property access gracefully", () => {
        const content = `test.each([
          { a: { b: null } },
          { a: null },
          { },
          { a: { } }
        ])('Access: $a.b.c.d', (data) => {})`;

        const result = expandEachTests("test.each([", "Access: $a.b.c.d", content, 0, "test", 1);

        expect(result).toHaveLength(1);
        expect(result[0].name).toBe("Access: $a.b.c.d");
      });

      test("should handle object methods and computed properties", () => {
        const content = `test.each([
          { fn: function() {}, method() {}, arrow: () => {} },
          { ["computed"]: "value", [Symbol.for("sym")]: "symbol" }
        ])('Object with methods', (obj) => {})`;

        const result = expandEachTests("test.each([", "Object with methods", content, 0, "test", 1);

        expect(result.length).toBeGreaterThanOrEqual(1);
      });
    });
  });

  describe("parseTestBlocks", () => {
    test("should parse simple test blocks", () => {
      const content = `
        test("should add numbers", () => {
          expect(1 + 1).toBe(2);
        });

        test("should multiply numbers", () => {
          expect(2 * 3).toBe(6);
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(2);
      expect(result[0].name).toBe("should add numbers");
      expect(result[0].type).toBe("test");
      expect(result[1].name).toBe("should multiply numbers");
      expect(result[1].type).toBe("test");
    });

    test("should parse describe blocks with nested tests", () => {
      const content = `
        describe("Math operations", () => {
          test("addition", () => {
            expect(1 + 1).toBe(2);
          });

          test("subtraction", () => {
            expect(5 - 3).toBe(2);
          });
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(1);
      expect(result[0].name).toBe("Math operations");
      expect(result[0].type).toBe("describe");
      expect(result[0].children).toHaveLength(2);
      expect(result[0].children[0].name).toBe("addition");
      expect(result[0].children[1].name).toBe("subtraction");
    });

    test("should handle test modifiers", () => {
      const content = `
        test.skip("skipped test", () => {});
        test.todo("todo test", () => {});
        test.only("only test", () => {});
        test.failing("failing test", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(4);
      expect(result[0].name).toBe("skipped test");
      expect(result[1].name).toBe("todo test");
      expect(result[2].name).toBe("only test");
      expect(result[3].name).toBe("failing test");
    });

    test("should handle conditional tests", () => {
      const content = `
        test.if(true)("conditional test", () => {});
        test.skipIf(false)("skip if test", () => {});
        test.todoIf(true)("todo if test", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("conditional test");
      expect(result[1].name).toBe("skip if test");
      expect(result[2].name).toBe("todo if test");
    });

    test("should ignore comments", () => {
      const content = `
        // This is a comment with test("fake test", () => {})
        /* Multi-line comment
           test("another fake test", () => {})
        */
        test("real test", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(1);
      expect(result[0].name).toBe("real test");
    });

    test("should handle nested describe blocks", () => {
      const content = `
        describe("Outer", () => {
          describe("Inner", () => {
            test("deeply nested", () => {});
          });
          test("shallow test", () => {});
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(1);
      expect(result[0].name).toBe("Outer");
      expect(result[0].children).toHaveLength(2);
      expect(result[0].children[0].name).toBe("Inner");
      expect(result[0].children[0].children).toHaveLength(1);
      expect(result[0].children[0].children[0].name).toBe("deeply nested");
      expect(result[0].children[1].name).toBe("shallow test");
    });

    test("should handle it() as alias for test()", () => {
      const content = `
        it("should work with it", () => {});
        it.skip("should skip with it", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(2);
      expect(result[0].name).toBe("should work with it");
      expect(result[0].type).toBe("test");
      expect(result[1].name).toBe("should skip with it");
    });

    test("should handle different quote types", () => {
      const content = `
        test('single quotes', () => {});
        test("double quotes", () => {});
        test(\`template literals\`, () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("single quotes");
      expect(result[1].name).toBe("double quotes");
      expect(result[2].name).toBe("template literals");
    });

    test("should handle escaped quotes in test names", () => {
      const content = `
        test("test with \\"escaped\\" quotes", () => {});
        test('test with \\'escaped\\' quotes', () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(2);
      expect(result[0].name).toBe('test with "escaped" quotes');
      expect(result[1].name).toBe("test with 'escaped' quotes");
    });

    test("should handle comments within test names", () => {
      const content = `
        test("test with // comment syntax", () => {});
        test("test with /* comment */ syntax", () => {});
        test("test with URL https://example.com", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThanOrEqual(1);

      const hasCommentSyntax = result.some(r => r.name.includes("comment syntax"));
      const hasURL = result.some(r => r.name.includes("https://example.com"));

      expect(hasCommentSyntax || hasURL).toBe(true);
    });

    test("should ignore code that looks like tests in strings", () => {
      const content = `
        const str = "test('fake test', () => {})";
        const template = \`describe("fake describe", () => {})\`;

        // Real test
        test("real test", () => {
          const example = 'test("nested fake", () => {})';
        });
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThanOrEqual(1);
      expect(result.some(r => r.name === "real test")).toBe(true);
    });

    test("should handle tests with complex modifier chains", () => {
      const content = `
        test.skip.failing("skipped failing test", () => {});
        test.only.todo("only todo test", () => {});
        describe.skip.each([1, 2])("skip each %i", (n) => {});
        it.failing.each([{a: 1}])("failing each $a", ({a}) => {});
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThan(0);
    });

    test("should handle weird spacing and formatting", () => {
      const content = `
        test ( "extra spaces" , ( ) => { } ) ;
        test
        (
          "multiline test"
          ,
          (
          )
          =>
          {
          }
        )
        ;
        test\t(\t"tabs"\t,\t()\t=>\t{}\t);
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("extra spaces");
      expect(result[1].name).toBe("multiline test");
      expect(result[2].name).toBe("tabs");
    });

    test("should handle test.each with complex patterns", () => {
      const content = `
        test.each([
          [1, 2, 3],
          [4, 5, 9]
        ])("when %i + %i, result should be %i", (a, b, expected) => {});

        describe.each([
          { db: "postgres" },
          { db: "mysql" }
        ])("Database $db", ({ db }) => {
          test("should connect", () => {});
        });
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(3);
      expect(result[0].name).toBe("when 1 + 2, result should be 3");
      expect(result[0].type).toBe("test");
      expect(result[1].name).toBe("when 4 + 5, result should be 9");
      expect(result[1].type).toBe("test");
      expect(result[2].name).toBe("Database $db");
      expect(result[2].type).toBe("describe");
    });

    test("should handle Unicode and emoji in test names", () => {
      const content = `
        test("测试中文", () => {});
        test("テスト日本語", () => {});
        test("тест русский", () => {});
        test("🚀 rocket test", () => {});
        test("Test with 🎉 celebration", () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(5);
      expect(result[0].name).toBe("测试中文");
      expect(result[1].name).toBe("テスト日本語");
      expect(result[2].name).toBe("тест русский");
      expect(result[3].name).toBe("🚀 rocket test");
      expect(result[4].name).toBe("Test with 🎉 celebration");
    });

    test("should handle test names with interpolation-like syntax", () => {
      const content = `
        test("test with \${variable}", () => {});
        test("test with \$dollar", () => {});
        test("test with %percent", () => {});
        test(\`template literal test\`, () => {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(4);
      expect(result[0].name).toBe("test with ${variable}");
      expect(result[1].name).toBe("test with $dollar");
      expect(result[2].name).toBe("test with %percent");
      expect(result[3].name).toBe("template literal test");
    });

    test("should handle async/await in test definitions", () => {
      const content = `
        test("sync test", () => {});
        test("async test", async () => {});
        test("test with await", async () => {
          await something();
        });
        it("async it", async function() {});
      `;

      const result = parseTestBlocks(content);

      expect(result).toHaveLength(4);
      expect(result[0].name).toBe("sync test");
      expect(result[1].name).toBe("async test");
      expect(result[2].name).toBe("test with await");
      expect(result[3].name).toBe("async it");
    });

    test("should handle generator functions and other ES6+ syntax", () => {
      const content = `
        test("generator test", function* () {
          yield 1;
        });

        test.each\`
          a | b | expected
          \${1} | \${1} | \${2}
          \${1} | \${2} | \${3}
        \`('$a + $b = $expected', ({ a, b, expected }) => {});
      `;

      const result = parseTestBlocks(content);

      expect(result.length).toBeGreaterThanOrEqual(1);
      expect(result[0].name).toBe("generator test");
    });
  });

  describe("getBraceDepth", () => {
    test("should count braces correctly", () => {
      const content = "{ { } }";
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
      expect(getBraceDepth(content, 0, 3)).toBe(2);
      expect(getBraceDepth(content, 0, 5)).toBe(1);
    });

    test("should ignore braces in strings", () => {
      const content = '{ "string with { braces }" }';
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should ignore braces in template literals", () => {
      const content = "{ `template with { braces }` }";
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle escaped quotes", () => {
      const content = '{ "escaped \\" quote" }';
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle mixed quotes", () => {
      const content = `{ "double" + 'single' + \`template\` }`;
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle nested braces", () => {
      const content = "{ a: { b: { c: 1 } } }";
      expect(getBraceDepth(content, 0, 10)).toBe(2);
      expect(getBraceDepth(content, 0, 15)).toBe(3);
    });

    test("should handle complex template literals", () => {
      const content = '{ `${foo({ bar: "baz" })} and ${nested.value}` }';
      expect(getBraceDepth(content, 0, content.length)).toBe(0);
    });

    test("should handle edge cases", () => {
      expect(getBraceDepth("", 0, 0)).toBe(0);

      expect(getBraceDepth("{{{}}}", 0, 6)).toBe(0);

      expect(getBraceDepth("{{{", 0, 3)).toBe(3);
      expect(getBraceDepth("}}}", 0, 3)).toBe(-3);

      const templateContent = "{ `${foo}` + `${bar}` }";
      expect(getBraceDepth(templateContent, 0, templateContent.length)).toBe(0);
    });
  });
});
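The `getBraceDepth` tests above pin down the contract: count the net `{`/`}` balance between two offsets while ignoring braces inside single-, double-, and backtick-quoted strings, honoring backslash escapes. A minimal sketch consistent with every assertion in that suite (an assumed re-implementation, not the extension's source):

```ts
// Illustrative sketch: net brace depth of content[start..end), skipping
// braces inside '...', "...", and `...`, with backslash escapes honored.
function braceDepth(content: string, start: number, end: number): number {
  let depth = 0;
  let quote: string | null = null; // the quote character we are currently inside, if any
  for (let i = start; i < end; i++) {
    const ch = content[i];
    if (ch === "\\") { i++; continue; }                  // skip the escaped character
    if (quote) {
      if (ch === quote) quote = null;                    // string ends
    } else if (ch === "'" || ch === '"' || ch === "`") {
      quote = ch;                                        // string begins
    } else if (ch === "{") depth++;
    else if (ch === "}") depth--;
  }
  return depth;
}

// braceDepth("{ { } }", 0, 3) === 2; braceDepth("}}}", 0, 3) === -3
```

One design note: treating everything between backticks as opaque string means `${...}` interpolation braces are ignored too, which is exactly what the "complex template literals" test requires.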
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,570 +0,0 @@
/**
 * Mock VSCode types and classes for testing
 * These should be as close as possible to the real VSCode API
 */

export interface MockUri {
  readonly scheme: string;
  readonly authority: string;
  readonly path: string;
  readonly query: string;
  readonly fragment: string;
  readonly fsPath: string;
  toString(): string;
}

export class MockUri implements MockUri {
  constructor(
    public readonly scheme: string,
    public readonly authority: string,
    public readonly path: string,
    public readonly query: string,
    public readonly fragment: string,
    public readonly fsPath: string,
  ) {}

  static file(path: string): MockUri {
    return new MockUri("file", "", path, "", "", path);
  }

  toString(): string {
    return `${this.scheme}://${this.authority}${this.path}`;
  }
}

export class MockPosition {
  constructor(
    public readonly line: number,
    public readonly character: number,
  ) {}
}

export class MockRange {
  constructor(
    public readonly start: MockPosition,
    public readonly end: MockPosition,
  ) {}
}

export class MockLocation {
  constructor(
    public readonly uri: MockUri,
    public readonly range: MockRange,
  ) {}
}

export class MockTestTag {
  constructor(public readonly id: string) {}
}

export class MockTestMessage {
  public location?: MockLocation;
  public actualOutput?: string;
  public expectedOutput?: string;

  constructor(public message: string | MockMarkdownString) {}

  static diff(message: string, expected: string, actual: string): MockTestMessage {
    const msg = new MockTestMessage(message);
    msg.expectedOutput = expected;
    msg.actualOutput = actual;
    return msg;
  }
}

export class MockMarkdownString {
  constructor(public value: string = "") {}

  appendCodeblock(code: string, language?: string): MockMarkdownString {
    this.value += `\n\`\`\`${language || ""}\n${code}\n\`\`\``;
    return this;
  }

  appendMarkdown(value: string): MockMarkdownString {
    this.value += value;
    return this;
  }

  appendText(value: string): MockMarkdownString {
    this.value += value.replace(/[\\`*_{}[\]()#+\-.!]/g, "\\$&");
    return this;
  }
}

export interface MockTestItem {
  readonly id: string;
  readonly uri?: MockUri;
  readonly children: MockTestItemCollection;
  readonly parent?: MockTestItem;
  label: string;
  description?: string;
  tags: readonly MockTestTag[];
  canResolveChildren: boolean;
  busy: boolean;
  range?: MockRange;
  error?: string | MockMarkdownString;
}

export interface MockTestItemCollection {
  readonly size: number;
  add(item: MockTestItem): void;
  replace(items: readonly MockTestItem[]): void;
  forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void;
  get(itemId: string): MockTestItem | undefined;
  delete(itemId: string): void;
  [Symbol.iterator](): Iterator<[string, MockTestItem]>;
}

export class MockTestItemCollection implements MockTestItemCollection {
  private items = new Map<string, MockTestItem>();

  get size(): number {
    return this.items.size;
  }

  add(item: MockTestItem): void {
    this.items.set(item.id, item);
  }

  replace(items: readonly MockTestItem[]): void {
    this.items.clear();
    for (const item of items) {
      this.items.set(item.id, item);
    }
  }

  forEach(callback: (item: MockTestItem, id: string, collection: MockTestItemCollection) => void): void {
    this.items.forEach((item, id) => callback(item, id, this));
  }

  get(itemId: string): MockTestItem | undefined {
    return this.items.get(itemId);
  }

  delete(itemId: string): void {
    this.items.delete(itemId);
  }

  [Symbol.iterator](): Iterator<[string, MockTestItem]> {
    return this.items[Symbol.iterator]();
  }

  clear(): void {
    this.items.clear();
  }

  set(id: string, item: MockTestItem): void {
    this.items.set(id, item);
  }

  values(): IterableIterator<MockTestItem> {
    return this.items.values();
  }

  keys(): IterableIterator<string> {
    return this.items.keys();
  }

  entries(): IterableIterator<[string, MockTestItem]> {
    return this.items.entries();
  }
}

export class MockTestItem implements MockTestItem {
  public canResolveChildren: boolean = false;
  public busy: boolean = false;
  public description?: string;
  public range?: MockRange;
  public error?: string | MockMarkdownString;
  public readonly children: MockTestItemCollection;

  constructor(
    public readonly id: string,
    public label: string,
    public readonly uri?: MockUri,
    public readonly parent?: MockTestItem,
    public tags: readonly MockTestTag[] = [],
  ) {
    this.children = new MockTestItemCollection();
  }
}

export interface MockTestController {
  readonly items: MockTestItemCollection;
  createTestItem(id: string, label: string, uri?: MockUri): MockTestItem;
  createRunProfile(
    label: string,
    kind: MockTestRunProfileKind,
    runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    isDefault?: boolean,
  ): MockTestRunProfile;
  createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun;
  invalidateTestResults(items?: readonly MockTestItem[]): void;
  resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
  refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;
}

export class MockTestController implements MockTestController {
  public readonly items: MockTestItemCollection;
  public resolveHandler?: (item: MockTestItem | undefined) => Promise<void> | void;
  public refreshHandler?: (token?: MockCancellationToken) => Promise<void> | void;

  constructor(
    public readonly id: string,
    public readonly label: string,
  ) {
    this.items = new MockTestItemCollection();
  }

  createTestItem(id: string, label: string, uri?: MockUri): MockTestItem {
    return new MockTestItem(id, label, uri);
  }

  createRunProfile(
    label: string,
    kind: MockTestRunProfileKind,
    runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    isDefault?: boolean,
  ): MockTestRunProfile {
    return new MockTestRunProfile(label, kind, runHandler, isDefault);
  }

  createTestRun(request: MockTestRunRequest, name?: string, persist?: boolean): MockTestRun {
    return new MockTestRun(name, persist);
  }

  invalidateTestResults(items?: readonly MockTestItem[]): void {
    // Mock implementation - in real VSCode this would invalidate test results
  }

  dispose(): void {
    this.items.clear();
  }
}

export enum MockTestRunProfileKind {
  Run = 1,
  Debug = 2,
  Coverage = 3,
}

export interface MockTestRunProfile {
  readonly label: string;
  readonly kind: MockTestRunProfileKind;
  readonly isDefault: boolean;
  readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>;
  dispose(): void;
}

export class MockTestRunProfile implements MockTestRunProfile {
  constructor(
    public readonly label: string,
    public readonly kind: MockTestRunProfileKind,
    public readonly runHandler: (request: MockTestRunRequest, token: MockCancellationToken) => void | Promise<void>,
    public readonly isDefault: boolean = false,
  ) {}

  dispose(): void {
    // No-op for mock
  }
}

export interface MockTestRunRequest {
  readonly include?: readonly MockTestItem[];
  readonly exclude?: readonly MockTestItem[];
  readonly profile?: MockTestRunProfile;
}

export class MockTestRunRequest implements MockTestRunRequest {
  constructor(
    public readonly include?: readonly MockTestItem[],
    public readonly exclude?: readonly MockTestItem[],
    public readonly profile?: MockTestRunProfile,
  ) {}
}

export interface MockTestRun {
  readonly name?: string;
  readonly token: MockCancellationToken;
  appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void;
  end(): void;
  enqueued(test: MockTestItem): void;
  errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
  failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void;
  passed(test: MockTestItem, duration?: number): void;
  skipped(test: MockTestItem): void;
  started(test: MockTestItem): void;
}

export class MockTestRun implements MockTestRun {
  public readonly token: MockCancellationToken;
  private _ended: boolean = false;

  constructor(
    public readonly name?: string,
    public readonly persist: boolean = true,
  ) {
    this.token = new MockCancellationToken();
  }

  appendOutput(output: string, location?: MockLocation, test?: MockTestItem): void {
    if (this._ended) return;
    // For mock, just store output - in real VS Code this would appear in test output
  }

  end(): void {
    this._ended = true;
  }

  enqueued(test: MockTestItem): void {
    if (this._ended) return;
    // Mock implementation
  }

  errored(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
    if (this._ended) return;
    // Mock implementation
  }

  failed(test: MockTestItem, message: MockTestMessage | readonly MockTestMessage[], duration?: number): void {
    if (this._ended) return;
    // Mock implementation
  }

  passed(test: MockTestItem, duration?: number): void {
    if (this._ended) return;
    // Mock implementation
  }

  skipped(test: MockTestItem): void {
    if (this._ended) return;
    // Mock implementation
  }

  started(test: MockTestItem): void {
    if (this._ended) return;
    // Mock implementation
  }
}

export interface MockCancellationToken {
  readonly isCancellationRequested: boolean;
  onCancellationRequested(listener: () => void): MockDisposable;
}

export class MockCancellationToken implements MockCancellationToken {
  private _isCancellationRequested: boolean = false;
  private _listeners: (() => void)[] = [];

  get isCancellationRequested(): boolean {
    return this._isCancellationRequested;
  }

  onCancellationRequested(listener: () => void): MockDisposable {
    this._listeners.push(listener);
    return new MockDisposable(() => {
      const index = this._listeners.indexOf(listener);
      if (index >= 0) {
        this._listeners.splice(index, 1);
      }
    });
  }

  cancel(): void {
    this._isCancellationRequested = true;
    this._listeners.forEach(listener => listener());
  }
}

export interface MockDisposable {
  dispose(): void;
}

export class MockDisposable implements MockDisposable {
  constructor(private readonly disposeFunc?: () => void) {}

  dispose(): void {
    this.disposeFunc?.();
  }
}

export interface MockTextDocument {
  readonly uri: MockUri;
  readonly fileName: string;
  readonly isUntitled: boolean;
  readonly languageId: string;
  readonly version: number;
  readonly isDirty: boolean;
  readonly isClosed: boolean;
  readonly eol: MockEndOfLine;
  readonly lineCount: number;
  getText(range?: MockRange): string;
  getWordRangeAtPosition(position: MockPosition, regex?: RegExp): MockRange | undefined;
  lineAt(line: number | MockPosition): MockTextLine;
  offsetAt(position: MockPosition): number;
  positionAt(offset: number): MockPosition;
  save(): Promise<boolean>;
  validatePosition(position: MockPosition): MockPosition;
  validateRange(range: MockRange): MockRange;
}

export enum MockEndOfLine {
  LF = 1,
  CRLF = 2,
}

export interface MockTextLine {
  readonly lineNumber: number;
  readonly text: string;
  readonly range: MockRange;
  readonly rangeIncludingLineBreak: MockRange;
  readonly firstNonWhitespaceCharacterIndex: number;
  readonly isEmptyOrWhitespace: boolean;
}

export interface MockWorkspaceFolder {
  readonly uri: MockUri;
  readonly name: string;
  readonly index: number;
}

export class MockWorkspaceFolder implements MockWorkspaceFolder {
  constructor(
    public readonly uri: MockUri,
    public readonly name: string,
    public readonly index: number = 0,
  ) {}
}

export interface MockFileSystemWatcher extends MockDisposable {
  readonly ignoreCreateEvents: boolean;
  readonly ignoreChangeEvents: boolean;
  readonly ignoreDeleteEvents: boolean;
  onDidCreate(listener: (uri: MockUri) => void): MockDisposable;
  onDidChange(listener: (uri: MockUri) => void): MockDisposable;
  onDidDelete(listener: (uri: MockUri) => void): MockDisposable;
}

export class MockFileSystemWatcher implements MockFileSystemWatcher {
  public readonly ignoreCreateEvents: boolean = false;
  public readonly ignoreChangeEvents: boolean = false;
  public readonly ignoreDeleteEvents: boolean = false;

  private _createListeners: ((uri: MockUri) => void)[] = [];
  private _changeListeners: ((uri: MockUri) => void)[] = [];
  private _deleteListeners: ((uri: MockUri) => void)[] = [];

  onDidCreate(listener: (uri: MockUri) => void): MockDisposable {
    this._createListeners.push(listener);
    return new MockDisposable(() => {
      const index = this._createListeners.indexOf(listener);
      if (index >= 0) this._createListeners.splice(index, 1);
    });
  }

  onDidChange(listener: (uri: MockUri) => void): MockDisposable {
    this._changeListeners.push(listener);
    return new MockDisposable(() => {
      const index = this._changeListeners.indexOf(listener);
      if (index >= 0) this._changeListeners.splice(index, 1);
    });
  }

  onDidDelete(listener: (uri: MockUri) => void): MockDisposable {
    this._deleteListeners.push(listener);
    return new MockDisposable(() => {
      const index = this._deleteListeners.indexOf(listener);
      if (index >= 0) this._deleteListeners.splice(index, 1);
    });
  }

  dispose(): void {
    this._createListeners.length = 0;
    this._changeListeners.length = 0;
    this._deleteListeners.length = 0;
  }

  // Helper methods for testing
  triggerCreate(uri: MockUri): void {
    this._createListeners.forEach(listener => listener(uri));
  }

  triggerChange(uri: MockUri): void {
    this._changeListeners.forEach(listener => listener(uri));
  }

  triggerDelete(uri: MockUri): void {
    this._deleteListeners.forEach(listener => listener(uri));
  }
}

export interface MockRelativePattern {
  readonly base: string;
  readonly pattern: string;
}

export class MockRelativePattern implements MockRelativePattern {
  constructor(
    public readonly base: string | MockWorkspaceFolder,
    public readonly pattern: string,
  ) {}

  get baseUri(): MockUri {
    if (typeof this.base === "string") {
      return MockUri.file(this.base);
    }
    return this.base.uri;
  }
}

export interface MockConfiguration {
  get<T>(section: string, defaultValue?: T): T | undefined;
  has(section: string): boolean;
  inspect<T>(section: string): MockConfigurationInspect<T> | undefined;
  update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void>;
}

export interface MockConfigurationInspect<T> {
  readonly key: string;
  readonly defaultValue?: T;
  readonly globalValue?: T;
  readonly workspaceValue?: T;
  readonly workspaceFolderValue?: T;
}

export enum MockConfigurationTarget {
  Global = 1,
  Workspace = 2,
  WorkspaceFolder = 3,
}

export class MockConfiguration implements MockConfiguration {
  private _values = new Map<string, any>();

  get<T>(section: string, defaultValue?: T): T | undefined {
    return this._values.get(section) ?? defaultValue;
  }

  has(section: string): boolean {
    return this._values.has(section);
  }

  inspect<T>(section: string): MockConfigurationInspect<T> | undefined {
    return {
      key: section,
      defaultValue: undefined,
      globalValue: this._values.get(section),
      workspaceValue: undefined,
      workspaceFolderValue: undefined,
    };
  }

  async update(section: string, value: any, configurationTarget?: MockConfigurationTarget): Promise<void> {
    this._values.set(section, value);
  }

  // Helper for testing
  setValue(section: string, value: any): void {
    this._values.set(section, value);
  }
}
@@ -1,56 +0,0 @@
import { mock } from "bun:test";
import {
  MockConfiguration,
  MockDisposable,
  MockFileSystemWatcher,
  MockLocation,
  MockMarkdownString,
  MockPosition,
  MockRange,
  MockRelativePattern,
  MockTestController,
  MockTestMessage,
  MockTestRunProfileKind,
  MockTestTag,
  MockUri,
  MockWorkspaceFolder,
} from "./vscode-types.mock";

mock.module("vscode", () => ({
  window: {
    createOutputChannel: () => ({
      appendLine: () => {},
    }),
    visibleTextEditors: [],
  },
  workspace: {
    getConfiguration: (section?: string) => new MockConfiguration(),
    onDidOpenTextDocument: () => new MockDisposable(),
    textDocuments: [],
    createFileSystemWatcher: (pattern: string | MockRelativePattern) => new MockFileSystemWatcher(),
    findFiles: async (include: string, exclude?: string, maxResults?: number, token?: any) => {
      return []; // Mock implementation
    },
  },
  Uri: MockUri,
  TestTag: MockTestTag,
  Position: MockPosition,
  Range: MockRange,
  Location: MockLocation,
  TestMessage: MockTestMessage,
  MarkdownString: MockMarkdownString,
  TestRunProfileKind: MockTestRunProfileKind,
  RelativePattern: MockRelativePattern,
  debug: {
    addBreakpoints: () => {},
    startDebugging: async () => true,
  },
}));

export function makeTestController(): MockTestController {
  return new MockTestController("test-controller", "Test Controller");
}

export function makeWorkspaceFolder(path: string): MockWorkspaceFolder {
  return new MockWorkspaceFolder(MockUri.file(path), path.split("/").pop() || "workspace", 0);
}
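A brief usage note: because `mock.module("vscode", ...)` swaps the module registry entry at runtime, anything that imports `"vscode"` must be loaded after the mock file has run. That is why the deleted test file above imports the mock first and then loads the controller with a dynamic `import()` rather than a static one:

```ts
import "./vscode.mock"; // installs mock.module("vscode", ...) before "vscode" is resolved
const { BunTestController } = await import("../bun-test-controller"); // now sees the mock
```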
@@ -17,7 +17,7 @@ export const debug = vscode.window.createOutputChannel("Bun - Test Runner");
|
||||
|
||||
export type TestNode = {
|
||||
name: string;
|
||||
type: "describe" | "test";
|
||||
type: "describe" | "test" | "it";
|
||||
line: number;
|
||||
children: TestNode[];
|
||||
parent?: TestNode;
|
||||
@@ -51,15 +51,11 @@ export class BunTestController implements vscode.Disposable {
|
||||
private currentRunType: "file" | "individual" = "file";
|
||||
private requestedTestIds: Set<string> = new Set();
|
||||
private discoveredTestIds: Set<string> = new Set();
|
||||
private executedTestCount: number = 0;
|
||||
private totalTestsStarted: number = 0;
|
||||
|
||||
constructor(
|
||||
private readonly testController: vscode.TestController,
|
||||
private readonly workspaceFolder: vscode.WorkspaceFolder,
|
||||
readonly isTest: boolean = false,
|
||||
) {
|
||||
if (isTest) return;
|
||||
this.setupTestController();
|
||||
this.setupWatchers();
|
||||
this.setupOpenDocumentListener();
|
||||
@@ -71,7 +67,10 @@ export class BunTestController implements vscode.Disposable {
|
||||
try {
|
||||
this.signal = await this.createSignal();
|
||||
await this.signal.ready;
|
||||
debug.appendLine(`Signal initialized at: ${this.signal.url}`);
|
||||
|
||||
this.signal.on("Signal.Socket.connect", (socket: net.Socket) => {
|
||||
debug.appendLine("Bun connected to signal socket");
|
||||
this.handleSocketConnection(socket, this.currentRun!);
|
||||
});
|
||||
|
||||
@@ -90,9 +89,8 @@ export class BunTestController implements vscode.Disposable {
|
||||
};
|
||||
|
||||
this.testController.refreshHandler = async token => {
|
||||
const files = await this.discoverInitialTests(token, false);
|
||||
const files = await this.discoverInitialTests(token);
|
||||
if (!files?.length) return;
|
||||
if (token.isCancellationRequested) return;
|
||||
|
||||
const filePaths = new Set(files.map(f => f.fsPath));
|
||||
for (const [, testItem] of this.testController.items) {
|
||||
@@ -136,21 +134,15 @@ export class BunTestController implements vscode.Disposable {
|
||||
}
|
||||
|
||||
private isTestFile(document: vscode.TextDocument): boolean {
|
||||
return (
|
||||
document?.uri?.scheme === "file" && /\.(test|spec)\.(js|jsx|ts|tsx|cjs|mjs|mts|cts)$/.test(document.uri.fsPath)
|
||||
);
|
||||
return document?.uri?.scheme === "file" && /\.(test|spec)\.(js|jsx|ts|tsx|cjs|mts)$/.test(document.uri.fsPath);
|
||||
}
|
||||
|
||||
private async discoverInitialTests(
|
||||
cancellationToken?: vscode.CancellationToken,
|
||||
reset: boolean = true,
|
||||
): Promise<vscode.Uri[] | undefined> {
|
||||
private async discoverInitialTests(cancellationToken?: vscode.CancellationToken): Promise<vscode.Uri[] | undefined> {
|
||||
try {
|
||||
const tests = await this.findTestFiles(cancellationToken);
|
||||
this.createFileTestItems(tests, reset);
|
||||
this.createFileTestItems(tests);
|
||||
return tests;
|
||||
} catch (error) {
|
||||
debug.appendLine(`Error in discoverInitialTests: ${error}`);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
@@ -187,8 +179,6 @@ export class BunTestController implements vscode.Disposable {
|
||||
const ignoreGlobs = new Set(["**/node_modules/**"]);
|
||||
|
||||
for (const ignore of ignores) {
|
||||
if (cancellationToken?.isCancellationRequested) return [];
|
||||
|
||||
try {
|
||||
const content = await fs.readFile(ignore.fsPath, { encoding: "utf8" });
|
||||
const lines = content
|
||||
@@ -205,15 +195,13 @@ export class BunTestController implements vscode.Disposable {
|
||||
ignoreGlobs.add(path.join(cwd.trim(), line.trim()));
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
debug.appendLine(`Error in buildIgnoreGlobs: ${ignore.fsPath}`);
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
return [...ignoreGlobs.values()];
|
||||
}
|
||||
|
||||
private createFileTestItems(files: vscode.Uri[], reset: boolean = true): void {
|
||||
private createFileTestItems(files: vscode.Uri[]): void {
|
||||
if (files.length === 0) {
|
||||
return;
|
||||
}
|
||||
@@ -226,9 +214,7 @@ export class BunTestController implements vscode.Disposable {
|
||||
path.relative(this.workspaceFolder.uri.fsPath, file.fsPath) || file.fsPath,
|
||||
file,
|
||||
);
|
||||
if (reset) {
|
||||
fileTestItem.children.replace([]);
|
||||
}
|
||||
fileTestItem.children.replace([]);
|
||||
fileTestItem.canResolveChildren = true;
|
||||
this.testController.items.add(fileTestItem);
|
||||
}
|
||||
@@ -288,13 +274,7 @@ export class BunTestController implements vscode.Disposable {
|
||||
return { bunCommand, testArgs };
|
||||
}
|
||||
|
||||
private async discoverTests(
|
||||
testItem?: vscode.TestItem | false,
|
||||
filePath?: string,
|
||||
cancellationToken?: vscode.CancellationToken,
|
||||
): Promise<void> {
|
||||
if (cancellationToken?.isCancellationRequested) return;
|
||||
|
||||
private async discoverTests(testItem?: vscode.TestItem | false, filePath?: string): Promise<void> {
|
||||
let targetPath = filePath;
|
||||
if (!targetPath && testItem) {
|
||||
targetPath = testItem?.uri?.fsPath || this.workspaceFolder.uri.fsPath;
|
||||
@@ -317,24 +297,17 @@ export class BunTestController implements vscode.Disposable {
|
||||
);
|
||||
this.testController.items.add(fileTestItem);
|
||||
}
|
||||
if (!this.currentRun) {
|
||||
fileTestItem.children.replace([]);
|
||||
}
|
||||
fileTestItem.children.replace([]);
|
||||
fileTestItem.canResolveChildren = false;
|
||||
|
||||
this.addTestNodes(testNodes, fileTestItem, targetPath);
|
||||
} catch {
|
||||
debug.appendLine(`Error in discoverTests: ${targetPath}`);
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
private parseTestBlocks(fileContent: string): TestNode[] {
|
||||
const cleanContent = fileContent
|
||||
.replace(/\/\*[\s\S]*?\*\//g, match => match.replace(/[^\n\r]/g, " "))
|
||||
.replace(/('(?:[^'\\]|\\.)*'|"(?:[^"\\]|\\.)*"|`(?:[^`\\]|\\.)*`)|\/\/.*$/gm, (match, str) => {
|
||||
if (str) return str;
|
||||
return " ".repeat(match.length);
|
||||
});
|
||||
.replace(/\/\/.*$/gm, match => " ".repeat(match.length));
|
||||
|
||||
const testRegex =
|
||||
/\b(describe|test|it)(?:\.(?:skip|todo|failing|only))?(?:\.(?:if|todoIf|skipIf)\s*\([^)]*\))?(?:\.each\s*\([^)]*\))?\s*\(\s*(['"`])((?:\\\2|.)*?)\2\s*(?:,|\))/g;
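
// Hedged illustration (editorial, not part of the diff): the regex above is meant to
// match test call sites and capture the callee plus the quoted title, e.g.
//
//   describe("math", () => {});             // -> type "describe", name "math"
//   test.skip("flaky case", () => {});      // -> type "test",     name "flaky case"
//   it.each([1, 2])("case %i", () => {});   // -> type "it",       name "case %i"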

@@ -346,7 +319,6 @@
    match = testRegex.exec(cleanContent);
    while (match !== null) {
      const [full, type, , name] = match;
      const _type = type === "it" ? "test" : type;
      const line = cleanContent.slice(0, match.index).split("\n").length - 1;

      while (
@@ -357,14 +329,7 @@
        stack.pop();
      }

      const expandedNodes = this.expandEachTests(
        full,
        name,
        cleanContent,
        match.index,
        _type as TestNode["type"],
        line,
      );
      const expandedNodes = this.expandEachTests(full, name, cleanContent, match.index, type as TestNode["type"], line);

      for (const node of expandedNodes) {
        if (stack.length === 0) {

@@ -468,16 +433,16 @@
      throw new Error("Not an array");
    }

    return eachValues.map((val, testIndex) => {
      let testName = name.replace(/%%/g, "%").replace(/%#/g, (testIndex + 1).toString());
    return eachValues.map(val => {
      let testName = name;
      if (Array.isArray(val)) {
        let idx = 0;
        testName = testName.replace(/%[isfdojp#%]/g, () => {
        testName = testName.replace(/%[isfd]/g, () => {
          const v = val[idx++];
          return typeof v === "object" ? JSON.stringify(v) : String(v);
        });
      } else {
        testName = testName.replace(/%[isfdojp#%]/g, () => {
        testName = testName.replace(/%[isfd]/g, () => {
          return typeof val === "object" ? JSON.stringify(val) : String(val);
        });
      }
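
// Hedged illustration (editorial): with the narrower /%[isfd]/ substitution above,
// an `.each` title is expanded once per value, e.g.
//
//   test.each([
//     [1, 2],
//     [3, 4],
//   ])("adds %i and %i", () => {});
//   // -> "adds 1 and 2", "adds 3 and 4"
//
// The removed /%[isfdojp#%]/ branch also expanded %o, %j, %p, %# and %%; those
// placeholders are now left verbatim in the generated name.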

@@ -510,22 +475,19 @@
        : this.escapeTestName(node.name);
      const testId = `${filePath}#${nodePath}`;

      let testItem = parent.children.get(testId);
      if (!testItem) {
        testItem = this.testController.createTestItem(testId, this.stripAnsi(node.name), vscode.Uri.file(filePath));
      const testItem = this.testController.createTestItem(testId, this.stripAnsi(node.name), vscode.Uri.file(filePath));

      if (node.type) testItem.tags = [new vscode.TestTag(node.type)];
      testItem.tags = [new vscode.TestTag(node.type === "describe" ? "describe" : "test")];

      if (typeof node.line === "number") {
        testItem.range = new vscode.Range(
          new vscode.Position(node.line, 0),
          new vscode.Position(node.line, node.name.length),
        );
      }

      parent.children.add(testItem);
      if (typeof node.line === "number") {
        testItem.range = new vscode.Range(
          new vscode.Position(node.line, 0),
          new vscode.Position(node.line, node.name.length),
        );
      }

      parent.children.add(testItem);

      if (node.children.length > 0) {
        this.addTestNodes(node.children, testItem, filePath, nodePath);
      }
@@ -538,7 +500,7 @@
  }

  private escapeTestName(source: string): string {
    return source.replace(/[^\w \-\u0080-\uFFFF]/g, "\\$&");
    return source.replace(/[^a-zA-Z0-9_\ ]/g, "\\$&");
  }
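
// Hedged illustration (editorial): both variants backslash-escape characters so the
// name can be embedded in a --test-name-pattern regex, e.g.
//
//   escapeTestName("adds (1 + 2)")  // -> "adds \(1 \+ 2\)"
//
// The removed character class left "-" and non-ASCII letters unescaped; the added
// one escapes everything outside [a-zA-Z0-9_ ].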

  private async createSignal(): Promise<UnixSignal | TCPSocketSignal> {
@@ -555,23 +517,6 @@
    token: vscode.CancellationToken,
    isDebug: boolean,
  ): Promise<void> {
    if (this.currentRun) {
      this.closeAllActiveProcesses();
      this.disconnectInspector();
      if (this.currentRun) {
        this.currentRun.appendOutput("\n\x1b[33mCancelled: Starting new test run\x1b[0m\n");
        this.currentRun.end();
        this.currentRun = null;
      }
    }
    this.totalTestsStarted++;
    if (this.totalTestsStarted > 15) {
      this.closeAllActiveProcesses();
      this.disconnectInspector();
      this.signal?.close();
      this.signal = null;
    }

    const run = this.testController.createTestRun(request);

    token.onCancellationRequested(() => {
@@ -580,14 +525,6 @@
      this.disconnectInspector();
    });

    if ("onDidDispose" in run) {
      (run.onDidDispose as vscode.Event<void>)(() => {
        run?.end?.();
        this.closeAllActiveProcesses();
        this.disconnectInspector();
      });
    }

    const queue: vscode.TestItem[] = [];

    if (request.include) {
@@ -610,9 +547,7 @@
      await this.runTestsWithInspector(queue, run, token);
    } catch (error) {
      for (const test of queue) {
        const msg = new vscode.TestMessage(`Error: ${error}`);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
        run.errored(test, new vscode.TestMessage(`Error: ${error}`));
      }
    } finally {
      run.end();
@@ -622,11 +557,8 @@
  private async runTestsWithInspector(
    tests: vscode.TestItem[],
    run: vscode.TestRun,
    token: vscode.CancellationToken,
    _token: vscode.CancellationToken,
  ): Promise<void> {
    const time = performance.now();
    if (token.isCancellationRequested) return;

    this.disconnectInspector();

    const allFiles = new Set<string>();
@@ -637,20 +569,13 @@
    }

    if (allFiles.size === 0) {
      const errorMsg = "No test files found to run.";
      run.appendOutput(`\x1b[31mError: ${errorMsg}\x1b[0m\n`);
      for (const test of tests) {
        const msg = new vscode.TestMessage(errorMsg);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
      }
      throw new Error(errorMsg);
      run.appendOutput("No test files found to run.\n");
      return;
    }

    for (const test of tests) {
      if (token.isCancellationRequested) return;
      if (test.uri && test.canResolveChildren) {
        await this.discoverTests(test, undefined, token);
        await this.discoverTests(test);
      }
    }

@@ -659,7 +584,6 @@

    this.requestedTestIds.clear();
    this.discoveredTestIds.clear();
    this.executedTestCount = 0;
    for (const test of tests) {
      this.requestedTestIds.add(test.id);
    }
@@ -683,38 +607,21 @@
        resolve();
      };

      const handleCancel = () => {
        clearTimeout(timeout);
        this.signal!.off("Signal.Socket.connect", handleConnect);
        reject(new Error("Test run cancelled"));
      };
      token.onCancellationRequested(handleCancel);

      this.signal!.once("Signal.Socket.connect", handleConnect);
    });

    const { bunCommand, testArgs } = this.getBunExecutionConfig();

    let args = [...testArgs, ...allFiles];
    let printedArgs = `\x1b[34;1m>\x1b[0m \x1b[34;1m${bunCommand} ${testArgs.join(" ")}\x1b[2m`;

    for (const file of allFiles) {
      const f = path.relative(this.workspaceFolder.uri.fsPath, file) || file;
      if (f.includes(" ")) {
        printedArgs += ` ".${path.sep}${f}"`;
      } else {
        printedArgs += ` .${path.sep}${f}`;
      }
    }
    let args = [...testArgs, ...Array.from(allFiles)];

    if (isIndividualTestRun) {
      const pattern = this.buildTestNamePattern(tests);
      if (pattern) {
        args.push("--test-name-pattern", pattern);
        printedArgs += `\x1b[0m\x1b[2m --test-name-pattern "${pattern}"\x1b[0m`;
        args.push("--test-name-pattern", process.platform === "win32" ? `"${pattern}"` : pattern);
      }
    }
    run.appendOutput(printedArgs + "\x1b[0m\r\n\r\n");

    run.appendOutput(`\r\n\x1b[34m>\x1b[0m \x1b[2m${bunCommand} ${args.join(" ")}\x1b[0m\r\n\r\n`);
    args.push(`--inspect-wait=${this.signal!.url}`);

    for (const test of tests) {
      if (isIndividualTestRun || tests.length === 1) {
@@ -724,52 +631,34 @@
      }
    }

    let inspectorUrl: string | undefined =
      this.signal.url.startsWith("ws") || this.signal.url.startsWith("tcp")
        ? `${this.signal!.url}?wait=1`
        : `${this.signal!.url}`;

    // right now there isn't a way to tell the socket method to wait for the connection
    if (!inspectorUrl?.includes("?wait=1")) {
      args.push(`--inspect-wait=${this.signal!.url}`);
      inspectorUrl = undefined;
    }

    const proc = spawn(bunCommand, args, {
      cwd: this.workspaceFolder.uri.fsPath,
      env: {
        ...process.env,
        BUN_DEBUG_QUIET_LOGS: "1",
        FORCE_COLOR: "1",
        BUN_INSPECT: inspectorUrl,
        ...process.env,
        NO_COLOR: "0",
      },
    });

    this.activeProcesses.add(proc);

    let stdout = "";

    proc.on("exit", (code, signal) => {
      if (code !== 0 && code !== 1) {
        debug.appendLine(`Test process failed: exit ${code}, signal ${signal}`);
      }
      debug.appendLine(`Process exited with code ${code}, signal ${signal}`);
    });

    proc.on("error", error => {
      stdout += `Process error: ${error.message}\n`;
      debug.appendLine(`Process error: ${error.message}`);
    });

    proc.stdout?.on("data", data => {
      const dataStr = data.toString();
      stdout += dataStr;
      const formattedOutput = dataStr.replace(/\n/g, "\r\n");
      run.appendOutput(formattedOutput);
    });

    proc.stderr?.on("data", data => {
      const dataStr = data.toString();
      stdout += dataStr;
      const formattedOutput = dataStr.replace(/\n/g, "\r\n");
      run.appendOutput(formattedOutput);
    });
@@ -777,57 +666,35 @@
    try {
      await socketPromise;
    } catch (error) {
      debug.appendLine(`Connection failed: ${error} (URL: ${this.signal!.url})`);
      debug.appendLine(`Failed to establish inspector connection: ${error}`);
      debug.appendLine(`Signal URL was: ${this.signal!.url}`);
      debug.appendLine(`Command was: ${bunCommand} ${args.join(" ")}`);
      throw error;
    }

    await new Promise<void>((resolve, reject) => {
      const handleClose = (code: number | null) => {
      proc.on("close", code => {
        this.activeProcesses.delete(proc);
        if (code === 0 || code === 1) {
          resolve();
        } else {
          reject(new Error(`Process exited with code ${code}. Please check the console for more details.`));
          reject(new Error(`Process exited with code ${code}`));
        }
      };
      });

      const handleError = (error: Error) => {
      proc.on("error", error => {
        this.activeProcesses.delete(proc);
        reject(error);
      };

      const handleCancel = () => {
        proc.kill("SIGTERM");
        this.activeProcesses.delete(proc);
        reject(new Error("Test run cancelled"));
      };

      proc.on("close", handleClose);
      proc.on("error", handleError);

      token.onCancellationRequested(handleCancel);
      });
    }).finally(() => {
      if (this.discoveredTestIds.size === 0) {
        const errorMsg =
          "No tests were executed. This could mean:\r\n- All tests were filtered out\r\n- The test runner crashed before running tests\r\n- No tests match the pattern";
        run.appendOutput(`\n\x1b[31m\x1b[1mError:\x1b[0m\x1b[31m ${errorMsg}\x1b[0m\n`);

        for (const test of tests) {
          if (!this.testResultHistory.has(test.id)) {
            const msg = new vscode.TestMessage(errorMsg + "\n\n----------\n" + stdout + "\n----------\n");
            msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
            run.errored(test, msg);
          }
        }
      if (isIndividualTestRun) {
        this.applyPreviousResults(tests, run);
      }

      if (this.discoveredTestIds.size > 0 && this.executedTestCount > 0) {
        if (isIndividualTestRun) {
          this.applyPreviousResults(tests, run);
          this.cleanupUndiscoveredTests(tests);
        } else {
          this.cleanupStaleTests(tests);
        }
      if (isIndividualTestRun) {
        this.cleanupUndiscoveredTests(tests);
      } else {
        this.cleanupStaleTests(tests);
      }

      if (this.activeProcesses.has(proc)) {
@@ -837,7 +704,6 @@

      this.disconnectInspector();
      this.currentRun = null;
      debug.appendLine(`Test run completed in ${performance.now() - time}ms`);
    });
  }

@@ -859,7 +725,7 @@
        run.passed(item, previousResult.duration);
        break;
      case "failed":
        run.failed(item, [], previousResult.duration);
        run.failed(item, previousResult.message || new vscode.TestMessage("Test failed"), previousResult.duration);
        break;
      case "skipped":
        run.skipped(item);
@@ -897,11 +763,16 @@
      this.handleLifecycleError(event, run);
    });

    this.debugAdapter.on("Inspector.event", e => {
      debug.appendLine(`Received inspector event: ${e.method}`);
    });

    this.debugAdapter.on("Inspector.error", e => {
      debug.appendLine(`Inspector error: ${e}`);
    });

    socket.on("close", () => {
      debug.appendLine("Inspector connection closed");
      this.debugAdapter = null;
    });

@@ -928,6 +799,7 @@
    const { id: inspectorTestId, url: sourceURL, name, type, parentId, line } = params;

    if (!sourceURL) {
      debug.appendLine(`Warning: Test found without URL: ${name}`);
      return;
    }

@@ -942,6 +814,8 @@
      this.inspectorToVSCode.set(inspectorTestId, testItem);
      this.vscodeToInspector.set(testItem.id, inspectorTestId);
      this.discoveredTestIds.add(testItem.id);
    } else {
      debug.appendLine(`Could not find VS Code test item for: ${name} in ${path.basename(filePath)}`);
    }
  }

@@ -1057,7 +931,6 @@
    if (!testItem) return;

    const duration = elapsed / 1000000;
    this.executedTestCount++;

    if (
      this.currentRunType === "individual" &&
@@ -1086,6 +959,7 @@
        break;
      case "skip":
      case "todo":
      case "skipped_because_label":
        run.skipped(testItem);
        this.testResultHistory.set(testItem.id, { status: "skipped" });
        break;
@@ -1096,8 +970,6 @@
        run.failed(testItem, timeoutMsg, duration);
        this.testResultHistory.set(testItem.id, { status: "failed", message: timeoutMsg, duration });
        break;
      case "skipped_because_label":
        break;
    }
  }

@@ -1206,10 +1078,7 @@
    const lines = messageLinesRaw;

    const errorLine = lines[0].trim();
    const messageLines = lines
      .slice(1)
      .filter(line => line.trim())
      .join("\n");
    const messageLines = lines.slice(1).join("\n");

    const errorType = errorLine.replace(/^(E|e)rror: /, "").trim();

@@ -1221,8 +1090,8 @@
    const regex = /^Expected:\s*([\s\S]*?)\nReceived:\s*([\s\S]*?)$/;
    let testMessage = vscode.TestMessage.diff(
      errorLine,
      messageLines.trim().match(regex)?.[1].trim() || "",
      messageLines.trim().match(regex)?.[2].trim() || "",
      messageLines.match(regex)?.[1].trim() || "",
      messageLines.match(regex)?.[2].trim() || "",
    );
    if (!messageLines.match(regex)) {
      const code = messageLines
@@ -1284,7 +1153,7 @@
      lastEffortMsg = lastEffortMsg.reverse();
    }

    const msg = errorType.startsWith("expect")
    const msg = errorLine.startsWith("error: expect")
      ? `${lastEffortMsg.join("\n")}\n${errorLine.trim()}`.trim()
      : `${errorLine.trim()}\n${messageLines}`.trim();

@@ -1332,15 +1201,12 @@

    t = t.replaceAll(/\$\{[^}]+\}/g, ".*?");
    t = t.replaceAll(/\\\$\\\{[^}]+\\\}/g, ".*?");
    t = t.replaceAll(/\\%[isfdojp#%]|(\\%)|(\\#)/g, ".*?");
    t = t.replaceAll(/\$[\w\.\[\]]+/g, ".*?");
    t = t.replaceAll(/\\%[isfd]/g, ".*?");

    if (test?.tags?.some(tag => tag.id === "test" || tag.id === "it")) {
    if (test.tags.some(tag => tag.id === "test" || tag.id === "it")) {
      testNames.push(`^ ${t}$`);
    } else if (test?.tags?.some(tag => tag.id === "describe")) {
      testNames.push(`^ ${t} `);
    } else {
      testNames.push(t);
      testNames.push(`^ ${t} `);
    }
  }
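
// Hedged illustration (editorial): from the pushes above, a leaf test named
// "adds 1 and 2" contributes "^ adds 1 and 2$" (anchored on both ends), while a
// "math" describe block contributes "^ math " (anchored at the start, open-ended).
// The \%i-style placeholders have already been widened to ".*?" so dynamically
// named tests still match.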

@@ -1376,13 +1242,7 @@
    const isIndividualTestRun = this.shouldUseTestNamePattern(tests);

    if (testFiles.size === 0) {
      const errorMsg = "No test files found to debug.";
      run.appendOutput(`\x1b[31mError: ${errorMsg}\x1b[0m\n`);
      for (const test of tests) {
        const msg = new vscode.TestMessage(errorMsg);
        msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
        run.errored(test, msg);
      }
      run.appendOutput("No test files found to debug.\n");
      run.end();
      return;
    }
@@ -1408,7 +1268,7 @@

    const pattern = this.buildTestNamePattern(tests);
    if (pattern) {
      args.push("--test-name-pattern", pattern);
      args.push("--test-name-pattern", process.platform === "win32" ? `"${pattern}"` : pattern);
    }
  }

@@ -1429,12 +1289,9 @@
    if (!res) throw new Error("Failed to start debugging session");
  } catch (error) {
    for (const test of tests) {
      const msg = new vscode.TestMessage(`Error starting debugger: ${error}`);
      msg.location = new vscode.Location(test.uri!, test.range || new vscode.Range(0, 0, 0, 0));
      run.errored(test, msg);
      run.errored(test, new vscode.TestMessage(`Error starting debugger: ${error}`));
    }
  }
  run.appendOutput("\n\x1b[33mDebug session started. Please open the debug console to see its output.\x1b[0m\r\n");
  run.end();
}

@@ -1461,32 +1318,6 @@
    }
    this.disposables = [];
  }

  // a sus way to expose internal functions to the test suite
  public get _internal() {
    return {
      expandEachTests: this.expandEachTests.bind(this),
      parseTestBlocks: this.parseTestBlocks.bind(this),
      getBraceDepth: this.getBraceDepth.bind(this),

      buildTestNamePattern: this.buildTestNamePattern.bind(this),
      stripAnsi: this.stripAnsi.bind(this),
      processErrorData: this.processErrorData.bind(this),
      escapeTestName: this.escapeTestName.bind(this),
      shouldUseTestNamePattern: this.shouldUseTestNamePattern.bind(this),

      isTestFile: this.isTestFile.bind(this),
      customFilePattern: this.customFilePattern.bind(this),
      getBunExecutionConfig: this.getBunExecutionConfig.bind(this),

      findTestByPath: this.findTestByPath.bind(this),
      findTestByName: this.findTestByName.bind(this),
      createTestItem: this.createTestItem.bind(this),

      createErrorMessage: this.createErrorMessage.bind(this),
      cleanupTestItem: this.cleanupTestItem.bind(this),
    };
  }
}

function windowsVscodeUri(uri: string): string {

@@ -7,14 +7,8 @@ export async function registerTests(context: vscode.ExtensionContext) {
    return;
  }

  const config = vscode.workspace.getConfiguration("bun.test");
  const enable = config.get<boolean>("enable", true);
  if (!enable) {
    return;
  }

  try {
    const controller = vscode.tests.createTestController("bun", "Bun Tests");
    const controller = vscode.tests.createTestController("bun-tests", "Bun Tests");
    context.subscriptions.push(controller);

    const bunTestController = new BunTestController(controller, workspaceFolder);

0
packages/bun-vscode/src/features/tests/types.ts
Normal file

@@ -2,8 +2,8 @@
+++ CMakeLists.txt
@@ -1,5 +1,5 @@
#
-cmake_minimum_required(VERSION 3.17 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.17...3.30 FATAL_ERROR)

PROJECT(libarchive C)
#
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12 FATAL_ERROR)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8.12...3.5 FATAL_ERROR)
if(POLICY CMP0065)
cmake_policy(SET CMP0065 NEW) #3.4 don't use `-rdynamic` with executables
endif()
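
# Hedged note (editorial, not part of the patch): "<min>...<max>" is CMake's
# version-range syntax (available since CMake 3.12). It keeps the same minimum
# requirement while opting in to the policy defaults of releases up to <max>,
# which silences deprecation warnings from newer CMake versions.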

@@ -1,29 +1,22 @@
--- a/libarchive/archive_write_add_filter_gzip.c 2025-07-21 06:29:58.505101515 +0000
+++ b/libarchive/archive_write_add_filter_gzip.c 2025-07-21 06:44:09.023676935 +0000
@@ -59,12 +59,13 @@
--- a/libarchive/archive_write_add_filter_gzip.c
+++ b/libarchive/archive_write_add_filter_gzip.c
@@ -58,6 +58,7 @@ archive_write_set_compression_gzip(struct archive *a)
struct private_data {
	int compression_level;
	int timestamp;
	char *original_filename;
+	unsigned char os;
#ifdef HAVE_ZLIB_H
	z_stream stream;
	int64_t total_in;
	unsigned char *compressed;
	size_t compressed_buffer_size;
-	unsigned long crc;
+	uint32_t crc;
#else
	struct archive_write_program_data *pdata;
#endif
@@ -108,6 +109,7 @@
@@ -106,6 +107,7 @@ archive_write_add_filter_gzip(struct archive *_a)
	archive_set_error(&a->archive, ENOMEM, "Out of memory");
	return (ARCHIVE_FATAL);
}
f->data = data;
+	data->os = 3; /* default Unix */
f->data = data;
f->open = &archive_compressor_gzip_open;
f->options = &archive_compressor_gzip_options;
f->close = &archive_compressor_gzip_close;
@@ -177,6 +179,30 @@
@@ -166,6 +168,30 @@ archive_compressor_gzip_options(struct archive_write_filter *f, const char *key,
return (ARCHIVE_OK);
}

@@ -54,7 +47,7 @@
/* Note: The "warn" return is just to inform the options
 * supervisor that we didn't handle it. It will generate
 * a suitable error if no one used this option. */
@@ -236,7 +262,7 @@
@@ -226,7 +252,7 @@ archive_compressor_gzip_open(struct archive_write_filter *f)
data->compressed[8] = 4;
else
data->compressed[8] = 0;

@@ -44,21 +44,17 @@ if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${
  await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`;

  console.log("moving cores out of nested directory");
  for await (const file of new Bun.Glob("bun-cores-*/*.core").scan(dir)) {
  for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) {
    fs.renameSync(join(dir, file), join(dir, basename(file)));
  }
} else {
  console.log(`already downloaded in ${dir}`);
}

const desiredCore = join(dir, (await new Bun.Glob(`*${pid}.core`).scan(dir).next()).value);

const args = [debuggerPath, "--core", desiredCore, join(dir, "bun-profile")];

console.log("launching debugger:");
console.log(args.map(Bun.$.escape).join(" "));
console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`);

const proc = Bun.spawn(args, {
const proc = await Bun.spawn([debuggerPath, "--core", join(dir, `bun-${pid}.core`), join(dir, "bun-profile")], {
  stdin: "inherit",
  stdout: "inherit",
  stderr: "inherit",

@@ -20,7 +20,7 @@ async function globSources(output, patterns, excludes = []) {

  const sources =
    paths
      .map(path => normalize(relative(root, path).replaceAll("\\", "/")))
      .map(path => normalize(relative(root, path)))
      .sort((a, b) => a.localeCompare(b))
      .join("\n")
      .trim() + "\n";

@@ -1,109 +0,0 @@
/**
 * p-limit@6.2.0
 * https://github.com/sindresorhus/p-limit
 * MIT (c) Sindre Sorhus
 */

import Queue from "./yocto-queue.mjs";

export default function pLimit(concurrency) {
  validateConcurrency(concurrency);

  const queue = new Queue();
  let activeCount = 0;

  const resumeNext = () => {
    if (activeCount < concurrency && queue.size > 0) {
      queue.dequeue()();
      // Since `pendingCount` has been decreased by one, increase `activeCount` by one.
      activeCount++;
    }
  };

  const next = () => {
    activeCount--;

    resumeNext();
  };

  const run = async (function_, resolve, arguments_) => {
    const result = (async () => function_(...arguments_))();

    resolve(result);

    try {
      await result;
    } catch {}

    next();
  };

  const enqueue = (function_, resolve, arguments_) => {
    // Queue `internalResolve` instead of the `run` function
    // to preserve asynchronous context.
    new Promise(internalResolve => {
      queue.enqueue(internalResolve);
    }).then(run.bind(undefined, function_, resolve, arguments_));

    (async () => {
      // This function needs to wait until the next microtask before comparing
      // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
      // after the `internalResolve` function is dequeued and called. The comparison in the if-statement
      // needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
      await Promise.resolve();

      if (activeCount < concurrency) {
        resumeNext();
      }
    })();
  };

  const generator = (function_, ...arguments_) =>
    new Promise(resolve => {
      enqueue(function_, resolve, arguments_);
    });

  Object.defineProperties(generator, {
    activeCount: {
      get: () => activeCount,
    },
    pendingCount: {
      get: () => queue.size,
    },
    clearQueue: {
      value() {
        queue.clear();
      },
    },
    concurrency: {
      get: () => concurrency,

      set(newConcurrency) {
        validateConcurrency(newConcurrency);
        concurrency = newConcurrency;

        queueMicrotask(() => {
          // eslint-disable-next-line no-unmodified-loop-condition
          while (activeCount < concurrency && queue.size > 0) {
            resumeNext();
          }
        });
      },
    },
  });

  return generator;
}

export function limitFunction(function_, option) {
  const { concurrency } = option;
  const limit = pLimit(concurrency);

  return (...arguments_) => limit(() => function_(...arguments_));
}

function validateConcurrency(concurrency) {
  if (!((Number.isInteger(concurrency) || concurrency === Number.POSITIVE_INFINITY) && concurrency > 0)) {
    throw new TypeError("Expected `concurrency` to be a number from 1 and up");
  }
}
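
// Hedged usage sketch (editorial; `urls` is a hypothetical input): a limiter built
// with pLimit(2) runs at most two of the wrapped calls concurrently.
//
//   const limit = pLimit(2);
//   const results = await Promise.all(urls.map(url => limit(() => fetch(url))));
//   console.log(limit.activeCount, limit.pendingCount);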

@@ -28,10 +28,9 @@ import {
  writeFileSync,
} from "node:fs";
import { readFile } from "node:fs/promises";
import { availableParallelism, userInfo } from "node:os";
import { userInfo } from "node:os";
import { basename, dirname, extname, join, relative, sep } from "node:path";
import { parseArgs } from "node:util";
import pLimit from "./p-limit.mjs";
import {
  getAbi,
  getAbiVersion,
@@ -64,7 +63,6 @@ import {
  unzip,
  uploadArtifact,
} from "./utils.mjs";

let isQuiet = false;
const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
const testsPath = join(cwd, "test");
@@ -155,10 +153,6 @@ const { values: options, positionals: filters } = parseArgs({
      type: "boolean",
      default: isBuildkite && isLinux,
    },
    ["parallel"]: {
      type: "boolean",
      default: false,
    },
  },
});

@@ -347,10 +341,6 @@ async function runTests() {
  const failedResults = [];
  const maxAttempts = 1 + (parseInt(options["retries"]) || 0);

  const parallelism = options["parallel"] ? availableParallelism() : 1;
  console.log("parallelism", parallelism);
  const limit = pLimit(parallelism);

  /**
   * @param {string} title
   * @param {function} fn
@@ -360,21 +350,17 @@ async function runTests() {
    const index = ++i;

    let result, failure, flaky;
    let attempt = 1;
    for (; attempt <= maxAttempts; attempt++) {
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      if (attempt > 1) {
        await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
      }

      let grouptitle = `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`;
      if (attempt > 1) grouptitle += ` ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`;

      if (parallelism > 1) {
        console.log(grouptitle);
        result = await fn();
      } else {
        result = await startGroup(grouptitle, fn);
      }
      result = await startGroup(
        attempt === 1
          ? `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`
          : `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title} ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`,
        fn,
      );

      const { ok, stdoutPreview, error } = result;
      if (ok) {
@@ -389,7 +375,6 @@ async function runTests() {
      const color = attempt >= maxAttempts ? "red" : "yellow";
      const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
      startGroup(label, () => {
        if (parallelism > 1) return;
        process.stderr.write(stdoutPreview);
      });

@@ -410,15 +395,14 @@ async function runTests() {
    // Group flaky tests together, regardless of the title
    const context = flaky ? "flaky" : title;
    const style = flaky || title.startsWith("vendor") ? "warning" : "error";
    if (!flaky) attempt = 1; // no need to show the retries count on failures, we know it maxed out

    if (title.startsWith("vendor")) {
      const content = formatTestToMarkdown({ ...failure, testPath: title }, false, attempt - 1);
      const content = formatTestToMarkdown({ ...failure, testPath: title });
      if (content) {
        reportAnnotationToBuildKite({ context, label: title, content, style });
      }
    } else {
      const content = formatTestToMarkdown(failure, false, attempt - 1);
      const content = formatTestToMarkdown(failure);
      if (content) {
        reportAnnotationToBuildKite({ context, label: title, content, style });
      }
@@ -428,10 +412,10 @@ async function runTests() {
    if (isGithubAction) {
      const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
      if (summaryPath) {
        const longMarkdown = formatTestToMarkdown(failure, false, attempt - 1);
        const longMarkdown = formatTestToMarkdown(failure);
        appendFileSync(summaryPath, longMarkdown);
      }
      const shortMarkdown = formatTestToMarkdown(failure, true, attempt - 1);
      const shortMarkdown = formatTestToMarkdown(failure, true);
      appendFileSync("comment.md", shortMarkdown);
    }

@@ -450,62 +434,48 @@ async function runTests() {
  }

  if (!failedResults.length) {
    await Promise.all(
      tests.map(testPath =>
        limit(() => {
          const absoluteTestPath = join(testsPath, testPath);
          const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
          if (isNodeTest(testPath)) {
            const testContent = readFileSync(absoluteTestPath, "utf-8");
            const runWithBunTest =
              title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
            const subcommand = runWithBunTest ? "test" : "run";
            const env = {
              FORCE_COLOR: "0",
              NO_COLOR: "1",
              BUN_DEBUG_QUIET_LOGS: "1",
            };
            if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
              env.BUN_JSC_validateExceptionChecks = "1";
            }
            return runTest(title, async () => {
              const { ok, error, stdout } = await spawnBun(execPath, {
                cwd: cwd,
                args: [
                  subcommand,
                  "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"),
                  absoluteTestPath,
                ],
                timeout: getNodeParallelTestTimeout(title),
                env,
                stdout: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stdout, chunk),
                stderr: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stderr, chunk),
              });
              const mb = 1024 ** 3;
              const stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
              return {
                testPath: title,
                ok: ok,
                status: ok ? "pass" : "fail",
                error: error,
                errors: [],
                tests: [],
                stdout: stdout,
                stdoutPreview: stdoutPreview,
              };
            });
          } else {
            return runTest(title, async () =>
              spawnBunTest(execPath, join("test", testPath), {
                cwd,
                stdout: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stdout, chunk),
                stderr: parallelism > 1 ? () => {} : chunk => pipeTestStdout(process.stderr, chunk),
              }),
            );
          }
        }),
      ),
    );
    for (const testPath of tests) {
      const absoluteTestPath = join(testsPath, testPath);
      const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
      if (isNodeTest(testPath)) {
        const testContent = readFileSync(absoluteTestPath, "utf-8");
        const runWithBunTest =
          title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
        const subcommand = runWithBunTest ? "test" : "run";
        const env = {
          FORCE_COLOR: "0",
          NO_COLOR: "1",
          BUN_DEBUG_QUIET_LOGS: "1",
        };
        if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
          env.BUN_JSC_validateExceptionChecks = "1";
        }
        await runTest(title, async () => {
          const { ok, error, stdout } = await spawnBun(execPath, {
            cwd: cwd,
            args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
            timeout: getNodeParallelTestTimeout(title),
            env,
            stdout: chunk => pipeTestStdout(process.stdout, chunk),
            stderr: chunk => pipeTestStdout(process.stderr, chunk),
          });
          const mb = 1024 ** 3;
          const stdoutPreview = stdout.slice(0, mb).split("\n").slice(0, 50).join("\n");
          return {
            testPath: title,
            ok: ok,
            status: ok ? "pass" : "fail",
            error: error,
            errors: [],
            tests: [],
            stdout: stdout,
            stdoutPreview: stdoutPreview,
          };
        });
      } else {
        await runTest(title, async () => spawnBunTest(execPath, join("test", testPath)));
      }
    }
  }

  if (vendorTests?.length) {
@@ -547,7 +517,7 @@ async function runTests() {

  if (isGithubAction) {
    reportOutputToGitHubAction("failing_tests_count", failedResults.length);
    const markdown = formatTestToMarkdown(failedResults, false, 0);
    const markdown = formatTestToMarkdown(failedResults);
    reportOutputToGitHubAction("failing_tests", markdown);
  }

@@ -1089,7 +1059,7 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
  const env = {
    GITHUB_ACTIONS: "true", // always true so annotations are parsed
  };
  if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
  if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
    env.BUN_JSC_validateExceptionChecks = "1";
  }

@@ -1098,8 +1068,8 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
    cwd: options["cwd"],
    timeout: isReallyTest ? timeout : 30_000,
    env,
    stdout: options.stdout,
    stderr: options.stderr,
    stdout: chunk => pipeTestStdout(process.stdout, chunk),
    stderr: chunk => pipeTestStdout(process.stderr, chunk),
  });
  const { tests, errors, stdout: stdoutPreview } = parseTestStdout(stdout, testPath);

@@ -1761,10 +1731,9 @@ function getTestLabel() {
/**
 * @param {TestResult | TestResult[]} result
 * @param {boolean} concise
 * @param {number} retries
 * @returns {string}
 */
function formatTestToMarkdown(result, concise, retries) {
function formatTestToMarkdown(result, concise) {
  const results = Array.isArray(result) ? result : [result];
  const buildLabel = getTestLabel();
  const buildUrl = getBuildUrl();
@@ -1808,9 +1777,6 @@ function formatTestToMarkdown(result, concise, retries) {
  if (platform) {
    markdown += ` on ${platform}`;
  }
  if (retries > 0) {
    markdown += ` (${retries} ${retries === 1 ? "retry" : "retries"})`;
  }

  if (concise) {
    markdown += "</li>\n";

70
scripts/sort-imports.ts → scripts/sortImports.ts
Executable file → Normal file

@@ -1,4 +1,3 @@
#!/usr/bin/env bun
import { readdirSync } from "fs";
import path from "path";

@@ -17,9 +16,10 @@ const usage = String.raw`
Usage: bun scripts/sortImports [options] <files...>

Options:
  --help              Show this help message
  --include-pub       Also sort ${"`pub`"} imports
  --keep-unused       Don't remove unused imports
  --help              Show this help message
  --no-include-pub    Exclude pub imports from sorting
  --no-remove-unused  Don't remove unused imports
  --include-unsorted  Process files even if they don't have @sortImports marker

Examples:
  bun scripts/sortImports src
@@ -34,9 +34,9 @@ if (filePaths.length === 0) {
}

const config = {
  includePub: args.includes("--include-pub"),
  removeUnused: !args.includes("--keep-unused"),
  normalizePaths: "./",
  includePub: !args.includes("--no-include-pub"),
  removeUnused: !args.includes("--no-remove-unused"),
  includeUnsorted: args.includes("--include-unsorted"),
};

// Type definitions
@@ -68,11 +68,11 @@ function parseDeclarations(
    const line = lines[i];

    if (line === "// @sortImports") {
      lines[i] = DELETED_LINE;
      lines[i] = "";
      continue;
    }

    const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);(\s*\/\/[^\n]*)?$/;
    const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/;
    const match = line.match(inlineDeclPattern);

    if (!match) continue;
@@ -275,6 +275,8 @@ function sortGroupsAndDeclarations(groups: Map<string, Group>): string[] {
// Generate the sorted output
function generateSortedOutput(lines: string[], groups: Map<string, Group>, sortedGroupKeys: string[]): string[] {
  const outputLines = [...lines];
  outputLines.push("");
  outputLines.push("// @sortImports");

  for (const groupKey of sortedGroupKeys) {
    const groupDeclarations = groups.get(groupKey)!;
@@ -286,36 +288,22 @@ function generateSortedOutput(lines: string[], groups: Map<string, Group>, sorte
    // Add declarations to output and mark original lines for removal
    for (const declaration of groupDeclarations.declarations) {
      outputLines.push(declaration.whole);
      outputLines[declaration.index] = DELETED_LINE;
      outputLines[declaration.index] = "";
    }
  }

  return outputLines;
}

function extractThisDeclaration(declarations: Map<string, Declaration>): Declaration | null {
  for (const declaration of declarations.values()) {
    if (declaration.value === "@This()") {
      declarations.delete(declaration.key);
      return declaration;
    }
  }
  return null;
}

const DELETED_LINE = "%DELETED_LINE%";

// Main execution function for a single file
async function processFile(filePath: string): Promise<void> {
  const originalFileContents = await Bun.file(filePath).text();
  let fileContents = originalFileContents;

  if (config.normalizePaths === "") {
    fileContents = fileContents.replaceAll(`@import("./`, `@import("`);
  } else if (config.normalizePaths === "./") {
    fileContents = fileContents.replaceAll(/@import\("([A-Za-z0-9_-][^"]*\.zig)"\)/g, '@import("./$1")');
    fileContents = fileContents.replaceAll(`@import("./../`, `@import("../`);
  if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) {
    return;
  }
  console.log(`Processing: ${filePath}`);

  let needsRecurse = true;
  while (needsRecurse) {
@@ -324,7 +312,6 @@ async function processFile(filePath: string): Promise<void> {
    const lines = fileContents.split("\n");

    const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents);
    const thisDeclaration = extractThisDeclaration(declarations);
    const groups = groupDeclarationsByImportPath(declarations);

    promoteItemsWithChildGroups(groups);
@@ -336,33 +323,13 @@ async function processFile(filePath: string): Promise<void> {
    // Remove unused declarations
    if (config.removeUnused) {
      for (const line of unusedLineIndices) {
        sortedLines[line] = DELETED_LINE;
        sortedLines[line] = "";
        needsRecurse = true;
      }
    }
    if (thisDeclaration) {
      sortedLines[thisDeclaration.index] = DELETED_LINE;
    }
    if (thisDeclaration) {
      let firstNonFileCommentLine = 0;
      for (const line of sortedLines) {
        if (line.startsWith("//!")) {
          firstNonFileCommentLine++;
        } else {
          break;
        }
      }
      const insert = [thisDeclaration.whole, ""];
      if (firstNonFileCommentLine > 0) insert.unshift("");
      sortedLines.splice(firstNonFileCommentLine, 0, ...insert);
    }
    fileContents = sortedLines.join("\n");
  }

  // Remove deleted lines
  fileContents = fileContents.replaceAll(DELETED_LINE + "\n", "");
  // fileContents = fileContents.replaceAll(DELETED_LINE, ""); // any remaining lines

  // Remove any leading newlines
  fileContents = fileContents.replace(/^\n+/, "");

@@ -376,6 +343,7 @@ async function processFile(filePath: string): Promise<void> {
  if (fileContents === "\n") fileContents = "";

  if (fileContents === originalFileContents) {
    console.log(`✓ No changes: ${filePath}`);
    return;
  }

@@ -401,7 +369,7 @@ async function main() {
      successCount++;
    } catch (error) {
      errorCount++;
      console.error(`Failed to process ${path.join(filePath, file)}:\n`, error);
      console.error(`Failed to process ${filePath}`);
    }
  }
  continue;
@@ -412,7 +380,7 @@ async function main() {
    successCount++;
  } catch (error) {
    errorCount++;
    console.error(`Failed to process ${filePath}:\n`, error);
    console.error(`Failed to process ${filePath}`);
  }
}

@@ -2702,14 +2702,7 @@ export function reportAnnotationToBuildKite({ context, label, content, style = "
    source: "buildkite",
    level: "error",
  });
  reportAnnotationToBuildKite({
    context,
    label: `${label}-error`,
    content: errorContent,
    style,
    priority,
    attempt: attempt + 1,
  });
  reportAnnotationToBuildKite({ label: `${label}-error`, content: errorContent, attempt: attempt + 1 });
}

/**
@@ -2857,14 +2850,6 @@ export function printEnvironment() {
      }
    });
  }
  if (isLinux) {
    startGroup("Memory", () => {
      const shell = which(["sh", "bash"]);
      if (shell) {
        spawnSync([shell, "-c", "free -m -w"], { stdio: "inherit" });
      }
    });
  }
  if (isWindows) {
    startGroup("Disk (win)", () => {
      const shell = which(["pwsh"]);
@@ -2872,14 +2857,6 @@ export function printEnvironment() {
      spawnSync([shell, "-c", "get-psdrive"], { stdio: "inherit" });
      }
    });
    startGroup("Memory", () => {
      const shell = which(["pwsh"]);
      if (shell) {
        spawnSync([shell, "-c", "Get-Counter '\\Memory\\Available MBytes'"], { stdio: "inherit" });
        console.log();
        spawnSync([shell, "-c", "Get-CimInstance Win32_PhysicalMemory"], { stdio: "inherit" });
      }
    });
  }
}

@@ -1,90 +0,0 @@
/**
 * yocto-queue@1.2.1
 * https://github.com/sindresorhus/yocto-queue
 * MIT (c) Sindre Sorhus
 */

/*
How it works:
`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.
*/

class Node {
  value;
  next;

  constructor(value) {
    this.value = value;
  }
}

export default class Queue {
  #head;
  #tail;
  #size;

  constructor() {
    this.clear();
  }

  enqueue(value) {
    const node = new Node(value);

    if (this.#head) {
      this.#tail.next = node;
      this.#tail = node;
    } else {
      this.#head = node;
      this.#tail = node;
    }

    this.#size++;
  }

  dequeue() {
    const current = this.#head;
    if (!current) {
      return;
    }

    this.#head = this.#head.next;
    this.#size--;
    return current.value;
  }

  peek() {
    if (!this.#head) {
      return;
    }

    return this.#head.value;

    // TODO: Node.js 18.
    // return this.#head?.value;
  }

  clear() {
    this.#head = undefined;
    this.#tail = undefined;
    this.#size = 0;
  }

  get size() {
    return this.#size;
  }

  *[Symbol.iterator]() {
    let current = this.#head;

    while (current) {
      yield current.value;
      current = current.next;
    }
  }

  *drain() {
    while (this.#head) {
      yield this.dequeue();
    }
  }
}
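
// Hedged usage sketch (editorial): the #tail reference described in "How it works"
// is what keeps enqueue O(1).
//
//   const queue = new Queue();
//   queue.enqueue("a");
//   queue.enqueue("b");
//   queue.dequeue(); // -> "a"
//   queue.size;      // -> 1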

155
scripts/zig-remove-unreferenced-top-level-decls.ts
Normal file

@@ -0,0 +1,155 @@
import * as fs from "fs";
import * as path from "path";

/**
 * Removes unreferenced top-level const declarations from a Zig file
 * Handles patterns like: const <IDENTIFIER> = @import(...) or const <IDENTIFIER> = ...
 */
export function removeUnreferencedImports(content: string): string {
  let modified = true;
  let result = content;

  // Keep iterating until no more changes are made
  while (modified) {
    modified = false;
    const lines = result.split("\n");
    const newLines: string[] = [];

    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];

      // Match top-level const declarations: const <IDENTIFIER> = ...
      const constMatch = line.match(/^const\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=(.*)$/);

      if (constMatch) {
        const identifier = constMatch[1];
        const assignmentPart = constMatch[2];

        // Skip lines that contain '{' in the assignment (likely structs/objects)
        if (assignmentPart.includes("{")) {
          newLines.push(line);
          continue;
        }

        // Check if this identifier is referenced anywhere else in the file
        const isReferenced = isIdentifierReferenced(identifier, lines, i);

        if (!isReferenced) {
          // Skip this line (delete it)
          modified = true;
          console.log(`Removing unreferenced import: ${identifier}`);
          continue;
        }
      }

      newLines.push(line);
    }

    result = newLines.join("\n");
  }

  return result;
}
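
// Hedged illustration (editorial): given a Zig source such as
//
//   const std = @import("std");      // referenced below -> kept
//   const unused = @import("foo");   // never mentioned again -> removed
//   pub fn main() void { std.debug.print("hi\n", .{}); }
//
// removeUnreferencedImports() drops only the `unused` line, then re-scans until the
// file stops changing, since an import can become unreferenced once its sole user
// is removed.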
|
||||
|
||||
/**
|
||||
* Check if an identifier is referenced anywhere in the file except at the declaration line
|
||||
*/
|
||||
function isIdentifierReferenced(identifier: string, lines: string[], declarationLineIndex: number): boolean {
|
||||
// Create a regex that matches the identifier as a whole word
|
||||
// This prevents matching partial words (e.g. "std" shouldn't match "stdx")
|
||||
const identifierRegex = new RegExp(`\\b${escapeRegex(identifier)}\\b`);
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
// Skip the declaration line itself
|
||||
if (i === declarationLineIndex) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const line = lines[i];
|
||||
|
||||
// Check if the identifier appears in this line
|
||||
if (identifierRegex.test(line)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape special regex characters in a string
|
||||
*/
|
||||
function escapeRegex(string: string): string {
|
||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a single Zig file
|
||||
*/
|
||||
export function processZigFile(filePath: string): void {
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf-8");
|
||||
const cleaned = removeUnreferencedImports(content);
|
||||
|
||||
if (content !== cleaned) {
|
||||
fs.writeFileSync(filePath, cleaned);
|
||||
console.log(`Cleaned: ${filePath}`);
|
||||
} else {
|
||||
console.log(`No changes: ${filePath}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error processing ${filePath}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process multiple Zig files or directories
|
||||
*/
|
||||
export function processFiles(paths: string[]): void {
|
||||
for (const inputPath of paths) {
|
||||
const stat = fs.statSync(inputPath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
// Process all .zig files in directory recursively
|
||||
processDirectory(inputPath);
|
||||
} else if (inputPath.endsWith(".zig")) {
|
||||
processZigFile(inputPath);
|
||||
} else {
|
||||
console.warn(`Skipping non-Zig file: ${inputPath}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively process all .zig files in a directory
|
||||
*/
|
||||
function processDirectory(dirPath: string): void {
|
||||
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dirPath, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
processDirectory(fullPath);
|
||||
} else if (entry.name.endsWith(".zig")) {
|
||||
processZigFile(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// CLI usage
|
||||
if (require.main === module) {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.log("Usage: bun zig-remove-unreferenced-top-level-decls.ts <file1.zig> [file2.zig] [directory]...");
|
||||
console.log("");
|
||||
console.log("Examples:");
|
||||
console.log(" bun zig-remove-unreferenced-top-level-decls.ts file.zig");
|
||||
console.log(" bun zig-remove-unreferenced-top-level-decls.ts src/");
|
||||
console.log(" bun zig-remove-unreferenced-top-level-decls.ts file1.zig file2.zig src/");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
processFiles(args);
|
||||
}
|
||||
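For context, here is a minimal illustration of what this cleaner does; the file name and declarations below are hypothetical. A top-level `const` whose identifier never appears again as a whole word is deleted, while a declaration whose right-hand side contains `{` is always left alone:

```zig
// demo.zig (hypothetical input)
const std = @import("std"); // referenced in main(): kept
const strings = @import("strings"); // never referenced again: removed
const Point = struct { x: i32 }; // contains '{', so the cleaner skips it

pub fn main() void {
    std.debug.print("hello\n", .{});
}
```

Running `bun zig-remove-unreferenced-top-level-decls.ts demo.zig` would drop only the `strings` line. Because the check uses a whole-word regex, an occurrence of `stdx` elsewhere in the file would not keep the `std` declaration alive.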
@@ -1,4 +1,12 @@
const Global = @This();
const std = @import("std");
const Environment = @import("./env.zig");

const Output = @import("output.zig");
const use_mimalloc = bun.use_mimalloc;
const Mimalloc = bun.Mimalloc;
const bun = @import("bun");

const version_string = Environment.version_string;

/// Does not have the canary tag, because it is exposed in `Bun.version`
/// "1.0.0" or "1.0.0-debug"
@@ -166,10 +174,10 @@ pub const versions = @import("./generated_versions_list.zig");
// 2. if I want to configure allocator later
pub inline fn configureAllocator(_: AllocatorConfiguration) void {
    // if (comptime !use_mimalloc) return;
    // const mimalloc = bun.mimalloc;
    // mimalloc.mi_option_set_enabled(mimalloc.mi_option_verbose, config.verbose);
    // mimalloc.mi_option_set_enabled(mimalloc.mi_option_large_os_pages, config.long_running);
    // if (!config.long_running) mimalloc.mi_option_set(mimalloc.mi_option_reset_delay, 0);
    // const Mimalloc = @import("./allocators/mimalloc.zig");
    // Mimalloc.mi_option_set_enabled(Mimalloc.mi_option_verbose, config.verbose);
    // Mimalloc.mi_option_set_enabled(Mimalloc.mi_option_large_os_pages, config.long_running);
    // if (!config.long_running) Mimalloc.mi_option_set(Mimalloc.mi_option_reset_delay, 0);
}

pub fn notimpl() noreturn {
@@ -183,17 +191,20 @@ pub fn crash() noreturn {
    Global.exit(1);
}

const Global = @This();
const string = bun.string;

pub const BunInfo = struct {
    bun_version: string,
    platform: analytics.GenerateHeader.GeneratePlatform.Platform,
    platform: Analytics.GenerateHeader.GeneratePlatform.Platform,

    const analytics = bun.analytics;
    const JSON = bun.json;
    const JSAst = bun.ast;
    const Analytics = @import("./analytics/analytics_thread.zig");
    const JSON = bun.JSON;
    const JSAst = bun.JSAst;
    pub fn generate(comptime Bundler: type, _: Bundler, allocator: std.mem.Allocator) !JSAst.Expr {
        const info = BunInfo{
            .bun_version = Global.package_json_version,
            .platform = analytics.GenerateHeader.GeneratePlatform.forOS(),
            .platform = Analytics.GenerateHeader.GeneratePlatform.forOS(),
        };

        return try JSON.toAST(allocator, BunInfo, info);
@@ -208,7 +219,7 @@ comptime {
}

pub export fn Bun__onExit() void {
    bun.jsc.Node.FSEvents.closeAndWait();
    bun.JSC.Node.FSEvents.closeAndWait();

    runExitCallbacks();
    Output.flush();
@@ -220,15 +231,3 @@ pub export fn Bun__onExit() void {
comptime {
    _ = Bun__onExit;
}

const string = []const u8;

const Output = @import("./output.zig");
const std = @import("std");

const Environment = @import("./env.zig");
const version_string = Environment.version_string;

const bun = @import("bun");
const Mimalloc = bun.mimalloc;
const use_mimalloc = bun.use_mimalloc;
@@ -1,4 +1,10 @@
const HTMLScanner = @This();
const std = @import("std");
const bun = @import("bun");
const ImportRecord = @import("./import_record.zig").ImportRecord;
const ImportKind = @import("./import_record.zig").ImportKind;
const lol = @import("./deps/lol-html.zig");
const logger = bun.logger;
const fs = bun.fs;

allocator: std.mem.Allocator,
import_records: ImportRecord.List = .{},
@@ -297,12 +303,4 @@ pub fn HTMLProcessor(
    };
}

const lol = @import("./deps/lol-html.zig");
const std = @import("std");

const ImportKind = @import("./import_record.zig").ImportKind;
const ImportRecord = @import("./import_record.zig").ImportRecord;

const bun = @import("bun");
const fs = bun.fs;
const logger = bun.logger;
const HTMLScanner = @This();
@@ -1,5 +1,3 @@
const OutputFile = @This();

// Instead of keeping files in-memory, we:
// 1. Write directly to disk
// 2. (Optional) move the file to the destination
@@ -15,31 +13,15 @@ hash: u64 = 0,
is_executable: bool = false,
source_map_index: u32 = std.math.maxInt(u32),
bytecode_index: u32 = std.math.maxInt(u32),
output_kind: jsc.API.BuildArtifact.OutputKind,
output_kind: JSC.API.BuildArtifact.OutputKind,
/// Relative
dest_path: []const u8 = "",
side: ?bun.bake.Side,
/// This is only set for the JS bundle, and not files associated with an
/// entrypoint like sourcemaps and bytecode
entry_point_index: ?u32,
referenced_css_chunks: []const Index = &.{},
referenced_css_files: []const Index = &.{},
source_index: Index.Optional = .none,
bake_extra: BakeExtra = .{},

pub const zero_value = OutputFile{
    .loader = .file,
    .src_path = Fs.Path.init(""),
    .value = .noop,
    .output_kind = .chunk,
    .side = null,
    .entry_point_index = null,
};

pub const BakeExtra = struct {
    is_route: bool = false,
    fully_static: bool = false,
    bake_is_runtime: bool = false,
};

pub const Index = bun.GenericIndex(u32, OutputFile);

@@ -48,7 +30,7 @@ pub fn deinit(this: *OutputFile) void {

    bun.default_allocator.free(this.src_path.text);
    bun.default_allocator.free(this.dest_path);
    bun.default_allocator.free(this.referenced_css_chunks);
    bun.default_allocator.free(this.referenced_css_files);
}

// Depending on:
@@ -117,13 +99,6 @@ pub const Value = union(Kind) {
        }
    }

    pub fn asSlice(v: Value) []const u8 {
        return switch (v) {
            .buffer => |buf| buf.bytes,
            else => "",
        };
    }

    pub fn toBunString(v: Value) bun.String {
        return switch (v) {
            .noop => bun.String.empty,
@@ -154,14 +129,14 @@ pub const Value = union(Kind) {

pub const SavedFile = struct {
    pub fn toJS(
        globalThis: *jsc.JSGlobalObject,
        globalThis: *JSC.JSGlobalObject,
        path: []const u8,
        byte_size: usize,
    ) jsc.JSValue {
    ) JSC.JSValue {
        const mime_type = globalThis.bunVM().mimeType(path);
        const store = jsc.WebCore.Blob.Store.initFile(
            jsc.Node.PathOrFileDescriptor{
                .path = jsc.Node.PathLike{
        const store = JSC.WebCore.Blob.Store.initFile(
            JSC.Node.PathOrFileDescriptor{
                .path = JSC.Node.PathLike{
                    .string = bun.PathString.init(path),
                },
            },
@@ -169,12 +144,12 @@ pub const SavedFile = struct {
            bun.default_allocator,
        ) catch unreachable;

        var blob = bun.default_allocator.create(jsc.WebCore.Blob) catch unreachable;
        blob.* = jsc.WebCore.Blob.initWithStore(store, globalThis);
        var blob = bun.default_allocator.create(JSC.WebCore.Blob) catch unreachable;
        blob.* = JSC.WebCore.Blob.initWithStore(store, globalThis);
        if (mime_type) |mime| {
            blob.content_type = mime.value;
        }
        blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(byte_size));
        blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(byte_size));
        blob.allocator = bun.default_allocator;
        return blob.toJS(globalThis);
    }
@@ -215,7 +190,7 @@ pub const Options = struct {
    size: ?usize = null,
    input_path: []const u8 = "",
    display_size: u32 = 0,
    output_kind: jsc.API.BuildArtifact.OutputKind,
    output_kind: JSC.API.BuildArtifact.OutputKind,
    is_executable: bool,
    data: union(enum) {
        buffer: struct {
@@ -231,8 +206,7 @@ pub const Options = struct {
    },
    side: ?bun.bake.Side,
    entry_point_index: ?u32,
    referenced_css_chunks: []const Index = &.{},
    bake_extra: BakeExtra = .{},
    referenced_css_files: []const Index = &.{},
};

pub fn init(options: Options) OutputFile {
@@ -266,8 +240,7 @@ pub fn init(options: Options) OutputFile {
        },
        .side = options.side,
        .entry_point_index = options.entry_point_index,
        .referenced_css_chunks = options.referenced_css_chunks,
        .bake_extra = options.bake_extra,
        .referenced_css_files = options.referenced_css_files,
    };
}

@@ -289,7 +262,7 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u
    }

    var path_buf: bun.PathBuffer = undefined;
    _ = try jsc.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
    _ = try JSC.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
        .data = .{ .buffer = .{
            .buffer = .{
                .ptr = @constCast(value.bytes.ptr),
@@ -344,20 +317,20 @@ pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: Fil
pub fn toJS(
    this: *OutputFile,
    owned_pathname: ?[]const u8,
    globalObject: *jsc.JSGlobalObject,
) bun.jsc.JSValue {
    globalObject: *JSC.JSGlobalObject,
) bun.JSC.JSValue {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => .js_undefined,
        .copy => |copy| brk: {
            const file_blob = jsc.WebCore.Blob.Store.initFile(
            const file_blob = JSC.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())
                    jsc.Node.PathOrFileDescriptor{
                    JSC.Node.PathOrFileDescriptor{
                        .fd = copy.fd,
                    }
                else
                    jsc.Node.PathOrFileDescriptor{
                        .path = jsc.Node.PathLike{ .string = bun.PathString.init(globalObject.allocator().dupe(u8, copy.pathname) catch unreachable) },
                    JSC.Node.PathOrFileDescriptor{
                        .path = JSC.Node.PathLike{ .string = bun.PathString.init(globalObject.allocator().dupe(u8, copy.pathname) catch unreachable) },
                    },
                this.loader.toMimeType(&.{owned_pathname orelse ""}),
                globalObject.allocator(),
@@ -365,8 +338,8 @@ pub fn toJS(
                Output.panic("error: Unable to create file blob: \"{s}\"", .{@errorName(err)});
            };

            var build_output = bun.new(jsc.API.BuildArtifact, .{
                .blob = jsc.WebCore.Blob.initWithStore(file_blob, globalObject),
            var build_output = bun.new(JSC.API.BuildArtifact, .{
                .blob = JSC.WebCore.Blob.initWithStore(file_blob, globalObject),
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
@@ -383,12 +356,12 @@ pub fn toJS(
            break :brk build_output.toJS(globalObject);
        },
        .saved => brk: {
            var build_output = bun.default_allocator.create(jsc.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            var build_output = bun.default_allocator.create(JSC.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            const path_to_use = owned_pathname orelse this.src_path.text;

            const file_blob = jsc.WebCore.Blob.Store.initFile(
                jsc.Node.PathOrFileDescriptor{
                    .path = jsc.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
            const file_blob = JSC.WebCore.Blob.Store.initFile(
                JSC.Node.PathOrFileDescriptor{
                    .path = JSC.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
                },
                this.loader.toMimeType(&.{owned_pathname orelse ""}),
                globalObject.allocator(),
@@ -403,8 +376,8 @@ pub fn toJS(
                },
            };

            build_output.* = jsc.API.BuildArtifact{
                .blob = jsc.WebCore.Blob.initWithStore(file_blob, globalObject),
            build_output.* = JSC.API.BuildArtifact{
                .blob = JSC.WebCore.Blob.initWithStore(file_blob, globalObject),
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
@@ -414,7 +387,7 @@ pub fn toJS(
            break :brk build_output.toJS(globalObject);
        },
        .buffer => |buffer| brk: {
            var blob = jsc.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalObject);
            var blob = JSC.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalObject);
            if (blob.store) |store| {
                store.mime_type = this.loader.toMimeType(&.{owned_pathname orelse ""});
                blob.content_type = store.mime_type.value;
@@ -422,10 +395,10 @@ pub fn toJS(
                blob.content_type = this.loader.toMimeType(&.{owned_pathname orelse ""}).value;
            }

            blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
            blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));

            var build_output = bun.default_allocator.create(jsc.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            build_output.* = jsc.API.BuildArtifact{
            var build_output = bun.default_allocator.create(JSC.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            build_output.* = JSC.API.BuildArtifact{
                .blob = blob,
                .hash = this.hash,
                .loader = this.input_loader,
@@ -448,20 +421,20 @@ pub fn toJS(
pub fn toBlob(
    this: *OutputFile,
    allocator: std.mem.Allocator,
    globalThis: *jsc.JSGlobalObject,
) !jsc.WebCore.Blob {
    globalThis: *JSC.JSGlobalObject,
) !JSC.WebCore.Blob {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => @panic("Cannot convert noop output file to blob"),
        .copy => |copy| brk: {
            const file_blob = try jsc.WebCore.Blob.Store.initFile(
            const file_blob = try JSC.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())
                    jsc.Node.PathOrFileDescriptor{
                    JSC.Node.PathOrFileDescriptor{
                        .fd = copy.fd,
                    }
                else
                    jsc.Node.PathOrFileDescriptor{
                        .path = jsc.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, copy.pathname) catch unreachable) },
                    JSC.Node.PathOrFileDescriptor{
                        .path = JSC.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, copy.pathname) catch unreachable) },
                    },
                this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
                allocator,
@@ -474,12 +447,12 @@ pub fn toBlob(
                },
            };

            break :brk jsc.WebCore.Blob.initWithStore(file_blob, globalThis);
            break :brk JSC.WebCore.Blob.initWithStore(file_blob, globalThis);
        },
        .saved => brk: {
            const file_blob = try jsc.WebCore.Blob.Store.initFile(
                jsc.Node.PathOrFileDescriptor{
                    .path = jsc.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, this.src_path.text) catch unreachable) },
            const file_blob = try JSC.WebCore.Blob.Store.initFile(
                JSC.Node.PathOrFileDescriptor{
                    .path = JSC.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, this.src_path.text) catch unreachable) },
                },
                this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
                allocator,
@@ -492,10 +465,10 @@ pub fn toBlob(
                },
            };

            break :brk jsc.WebCore.Blob.initWithStore(file_blob, globalThis);
            break :brk JSC.WebCore.Blob.initWithStore(file_blob, globalThis);
        },
        .buffer => |buffer| brk: {
            var blob = jsc.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalThis);
            var blob = JSC.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalThis);
            if (blob.store) |store| {
                store.mime_type = this.loader.toMimeType(&.{ this.dest_path, this.src_path.text });
                blob.content_type = store.mime_type.value;
@@ -510,22 +483,22 @@ pub fn toBlob(
                },
            };

            blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
            blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
            break :brk blob;
        },
    };
}

const OutputFile = @This();
const string = []const u8;

const resolve_path = @import("./resolver/resolve_path.zig");
const resolver = @import("./resolver/resolver.zig");
const std = @import("std");
const Loader = @import("./options.zig").Loader;

const bun = @import("bun");
const Environment = bun.Environment;
const FileDescriptorType = bun.FileDescriptor;

const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;
const Fs = bun.fs;
const jsc = bun.jsc;
const Output = bun.Global.Output;
const Loader = @import("./options.zig").Loader;
const resolver = @import("./resolver/resolver.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const Output = @import("./Global.zig").Output;
const Environment = bun.Environment;
@@ -14,7 +14,12 @@
//! * `refresh_rate_ms`
//! * `initial_delay_ms`

const std = @import("std");
const builtin = @import("builtin");
const windows = std.os.windows;
const assert = bun.assert;
const Progress = @This();
const bun = @import("bun");

/// `null` if the current node (and its children) should
/// not print on update()
@@ -448,10 +453,3 @@ test "basic functionality" {
        node.end();
    }
}

const builtin = @import("builtin");
const std = @import("std");
const windows = std.os.windows;

const bun = @import("bun");
const assert = bun.assert;
@@ -1,6 +1,20 @@
//! Originally, we tried using LIEF to inject the module graph into a MachO segment
//! But this incurred a fixed 350ms overhead on every build, which is unacceptable
//! so we give up on codesigning support on macOS for now until we can find a better solution
const bun = @import("bun");
const std = @import("std");
const Schema = bun.Schema.Api;
const strings = bun.strings;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const Syscall = bun.sys;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;

const macho = bun.macho;
const pe = bun.pe;
const w = std.os.windows;

pub const StandaloneModuleGraph = struct {
    bytes: []const u8 = "",
@@ -182,7 +196,7 @@ pub const StandaloneModuleGraph = struct {
            return this.wtf_string.dupeRef();
        }

        pub fn blob(this: *File, globalObject: *bun.jsc.JSGlobalObject) *bun.webcore.Blob {
        pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.webcore.Blob {
            if (this.cached_blob == null) {
                const store = bun.webcore.Blob.Store.init(@constCast(this.contents), bun.default_allocator);
                // make it never free
@@ -718,10 +732,6 @@ pub const StandaloneModuleGraph = struct {
                    cleanup(zname, cloned_executable_fd);
                    Global.exit(1);
                };
                // Set executable permissions when running on POSIX hosts, even for Windows targets
                if (comptime !Environment.isWindows) {
                    _ = bun.c.fchmod(cloned_executable_fd.native(), 0o777);
                }
                return cloned_executable_fd;
            },
            else => {
@@ -1226,13 +1236,13 @@ pub const StandaloneModuleGraph = struct {

        // the allocator given to the JS parser is not respected for all parts
        // of the parse, so we need to remember to reset the ast store
        bun.ast.Expr.Data.Store.reset();
        bun.ast.Stmt.Data.Store.reset();
        bun.JSAst.Expr.Data.Store.reset();
        bun.JSAst.Stmt.Data.Store.reset();
        defer {
            bun.ast.Expr.Data.Store.reset();
            bun.ast.Stmt.Data.Store.reset();
            bun.JSAst.Expr.Data.Store.reset();
            bun.JSAst.Stmt.Data.Store.reset();
        }
        var json = bun.json.parse(&json_src, &log, arena, false) catch
        var json = bun.JSON.parse(&json_src, &log, arena, false) catch
            return error.InvalidSourceMap;

        const mappings_str = json.get("mappings") orelse
@@ -1310,18 +1320,3 @@ pub const StandaloneModuleGraph = struct {
        bun.assert(header_list.items.len == string_payload_start_location);
    }
};

const std = @import("std");
const w = std.os.windows;

const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const Output = bun.Output;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;
const Syscall = bun.sys;
const macho = bun.macho;
const pe = bun.pe;
const strings = bun.strings;
const Schema = bun.schema.api;
@@ -1,5 +1,13 @@
// https://github.com/lithdew/rheia/blob/162293d0f0e8d6572a8954c0add83f13f76b3cc6/hash_map.zig
// Apache License 2.0
const std = @import("std");

const mem = std.mem;
const math = std.math;
const testing = std.testing;

const bun = @import("bun");
const assert = bun.assert;

pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type {
    return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage);
@@ -777,11 +785,3 @@ test "SortedHashMap: collision test" {
    try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?);
    try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?);
}

const bun = @import("bun");
const assert = bun.assert;

const std = @import("std");
const math = std.math;
const mem = std.mem;
const testing = std.testing;
@@ -1,7 +1,5 @@
//! Bun's cross-platform filesystem watcher. Runs on its own thread.

const Watcher = @This();

const DebugLogScope = bun.Output.Scoped(.watcher, false);
const log = DebugLogScope.log;

@@ -128,6 +126,7 @@ pub fn getHash(filepath: string) HashType {

pub const WatchItemIndex = u16;
pub const max_eviction_count = 8096;
const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
// TODO: some platform-specific behavior is implemented in
// this file instead of the platform-specific file.
// ideally, the constants above can be inlined
@@ -289,7 +288,7 @@ pub fn flushEvictions(this: *Watcher) void {
    }
}

fn watchLoop(this: *Watcher) bun.jsc.Maybe(void) {
fn watchLoop(this: *Watcher) bun.JSC.Maybe(void) {
    while (this.running) {
        // individual platform implementation will call onFileUpdate
        switch (Platform.watchLoopCycle(this)) {
@@ -309,7 +308,7 @@ fn appendFileAssumeCapacity(
    parent_hash: HashType,
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
) bun.jsc.Maybe(void) {
) bun.JSC.Maybe(void) {
    if (comptime Environment.isWindows) {
        // on windows we can only watch items that are in the directory tree of the top level dir
        const rel = bun.path.isParentOrEqual(this.fs.top_level_dir, file_path);
@@ -390,7 +389,7 @@ fn appendDirectoryAssumeCapacity(
    file_path: string,
    hash: HashType,
    comptime clone_file_path: bool,
) bun.jsc.Maybe(WatchItemIndex) {
) bun.JSC.Maybe(WatchItemIndex) {
    if (comptime Environment.isWindows) {
        // on windows we can only watch items that are in the directory tree of the top level dir
        const rel = bun.path.isParentOrEqual(this.fs.top_level_dir, file_path);
@@ -501,7 +500,7 @@ pub fn appendFileMaybeLock(
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
    comptime lock: bool,
) bun.jsc.Maybe(void) {
) bun.JSC.Maybe(void) {
    if (comptime lock) this.mutex.lock();
    defer if (comptime lock) this.mutex.unlock();
    bun.assert(file_path.len > 1);
@@ -577,7 +576,7 @@ pub fn appendFile(
    dir_fd: bun.FileDescriptor,
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
) bun.jsc.Maybe(void) {
) bun.JSC.Maybe(void) {
    return appendFileMaybeLock(this, fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, true);
}

@@ -587,7 +586,7 @@ pub fn addDirectory(
    file_path: string,
    hash: HashType,
    comptime clone_file_path: bool,
) bun.jsc.Maybe(WatchItemIndex) {
) bun.JSC.Maybe(WatchItemIndex) {
    this.mutex.lock();
    defer this.mutex.unlock();

@@ -609,7 +608,7 @@ pub fn addFile(
    dir_fd: bun.FileDescriptor,
    package_json: ?*PackageJSON,
    comptime clone_file_path: bool,
) bun.jsc.Maybe(void) {
) bun.JSC.Maybe(void) {
    // This must lock due to concurrent transpiler
    this.mutex.lock();
    defer this.mutex.unlock();
@@ -674,16 +673,13 @@ pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: bun.Sto
    }
}

const string = []const u8;

const WindowsWatcher = @import("./watcher/WindowsWatcher.zig");
const options = @import("./options.zig");
const std = @import("std");
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;

const bun = @import("bun");
const Environment = bun.Environment;
const FeatureFlags = bun.FeatureFlags;
const Mutex = bun.Mutex;
const string = bun.string;
const Output = bun.Output;
const Environment = bun.Environment;
const strings = bun.strings;
const FeatureFlags = bun.FeatureFlags;
const options = @import("./options.zig");
const Mutex = bun.Mutex;
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
@@ -1,12 +1,8 @@
pub const c_allocator = @import("./allocators/basic.zig").c_allocator;
pub const z_allocator = @import("./allocators/basic.zig").z_allocator;
pub const mimalloc = @import("./allocators/mimalloc.zig");
pub const MimallocArena = @import("./allocators/MimallocArena.zig");
pub const AllocationScope = @import("./allocators/AllocationScope.zig");
pub const NullableAllocator = @import("./allocators/NullableAllocator.zig");
pub const MaxHeapAllocator = @import("./allocators/MaxHeapAllocator.zig");
pub const MemoryReportingAllocator = @import("./allocators/MemoryReportingAllocator.zig");
pub const LinuxMemFdAllocator = @import("./allocators/LinuxMemFdAllocator.zig");
const std = @import("std");

const Environment = @import("./env.zig");
const bun = @import("bun");
const OOM = bun.OOM;

pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
    return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
@@ -294,6 +290,8 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
    };
}

const Mutex = bun.Mutex;

/// Append-only list.
/// Stores an initial count in .bss section of the object file
/// Overflows to heap when count is exceeded.
@@ -771,10 +769,3 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
        }
    };
}

const Environment = @import("./env.zig");
const std = @import("std");

const bun = @import("bun");
const OOM = bun.OOM;
const Mutex = bun.threading.Mutex;
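The BSSList doc comment above describes the trick: keep the first N items in a statically sized global (so they land in the executable's .bss section and cost nothing until touched), and only go to the heap once that budget runs out. A rough sketch of the idea in plain Zig, with made-up names and no thread safety (the real BSSList takes a Mutex):

```zig
const std = @import("std");

/// Illustrative only: the first `bss_count` items live in a global array
/// (placed in .bss); later items overflow to a heap-backed list.
fn BssBackedList(comptime T: type, comptime bss_count: usize) type {
    return struct {
        var fixed: [bss_count]T = undefined;
        var used: usize = 0;

        overflow: std.ArrayList(T),

        pub fn init(allocator: std.mem.Allocator) @This() {
            return .{ .overflow = std.ArrayList(T).init(allocator) };
        }

        pub fn append(self: *@This(), value: T) !void {
            if (used < bss_count) {
                // Static budget still available: no allocation happens.
                fixed[used] = value;
                used += 1;
                return;
            }
            // Static budget exhausted: fall back to the heap.
            try self.overflow.append(value);
        }
    };
}
```

Unlike the real BSSList this sketch also skips stable pointers across growth; it is only meant to show where the .bss half ends and the heap half begins.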
@@ -1,6 +1,5 @@
//! AllocationScope wraps another allocator, providing leak and invalid free assertions.
//! It also allows measuring how much memory a scope has allocated.

const AllocationScope = @This();

pub const enabled = bun.Environment.enableAllocScopes;
@@ -254,7 +253,6 @@ pub inline fn downcast(a: Allocator) ?*AllocationScope {

const std = @import("std");
const Allocator = std.mem.Allocator;

const bun = @import("bun");
const Output = bun.Output;
const StoredTrace = bun.crash_handler.StoredTrace;
@@ -1,189 +0,0 @@
//! When cloning large amounts of data potentially multiple times, we can
//! leverage copy-on-write memory to avoid actually copying the data. To do that
//! on Linux, we need to use a memfd, which is a Linux-specific feature.
//!
//! The steps are roughly:
//!
//! 1. Create a memfd
//! 2. Write the data to the memfd
//! 3. Map the memfd into memory
//!
//! Then, to clone the data later, we can just call `mmap` again.
//!
//! The big catch is that mmap(), memfd_create(), write() all have overhead. And
//! often we will re-use virtual memory within the process. This does not reuse
//! the virtual memory. So we should only really use this for large blobs of
//! data that we expect to be cloned multiple times. Such as Blob in FormData.

const Self = @This();

const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{});

pub const new = bun.TrivialNew(@This());
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;

ref_count: RefCount,
fd: bun.FileDescriptor = .invalid,
size: usize = 0,

var memfd_counter = std.atomic.Value(usize).init(0);

fn deinit(self: *Self) void {
    self.fd.close();
    bun.destroy(self);
}

pub fn allocator(self: *Self) std.mem.Allocator {
    return .{
        .ptr = self,
        .vtable = AllocatorInterface.VTable,
    };
}

pub fn from(allocator_: std.mem.Allocator) ?*Self {
    if (allocator_.vtable == AllocatorInterface.VTable) {
        return @alignCast(@ptrCast(allocator_.ptr));
    }

    return null;
}

const AllocatorInterface = struct {
    fn alloc(_: *anyopaque, _: usize, _: std.mem.Alignment, _: usize) ?[*]u8 {
        // it should perform no allocations or resizes
        return null;
    }

    fn free(
        ptr: *anyopaque,
        buf: []u8,
        _: std.mem.Alignment,
        _: usize,
    ) void {
        var self: *Self = @alignCast(@ptrCast(ptr));
        defer self.deref();
        bun.sys.munmap(@alignCast(@ptrCast(buf))).unwrap() catch |err| {
            bun.Output.debugWarn("Failed to munmap memfd: {}", .{err});
        };
    }

    pub const VTable = &std.mem.Allocator.VTable{
        .alloc = &AllocatorInterface.alloc,
        .resize = &std.mem.Allocator.noResize,
        .remap = &std.mem.Allocator.noRemap,
        .free = &free,
    };
};

pub fn alloc(self: *Self, len: usize, offset: usize, flags: std.posix.MAP) bun.jsc.Maybe(bun.webcore.Blob.Store.Bytes) {
    var size = len;

    // size rounded up to nearest page
    size = std.mem.alignForward(usize, size, std.heap.pageSize());

    var flags_mut = flags;
    flags_mut.TYPE = .SHARED;

    switch (bun.sys.mmap(
        null,
        @min(size, self.size),
        std.posix.PROT.READ | std.posix.PROT.WRITE,
        flags_mut,
        self.fd,
        offset,
    )) {
        .result => |slice| {
            return .{
                .result = bun.webcore.Blob.Store.Bytes{
                    .cap = @truncate(slice.len),
                    .ptr = slice.ptr,
                    .len = @truncate(len),
                    .allocator = self.allocator(),
                },
            };
        },
        .err => |errno| {
            return .{ .err = errno };
        },
    }
}

pub fn shouldUse(bytes: []const u8) bool {
    if (comptime !bun.Environment.isLinux) {
        return false;
    }

    if (bun.jsc.VirtualMachine.is_smol_mode) {
        return bytes.len >= 1024 * 1024 * 1;
    }

    // This is a net 2x - 4x slowdown to new Blob([huge])
    // so we must be careful
    return bytes.len >= 1024 * 1024 * 8;
}

pub fn create(bytes: []const u8) bun.jsc.Maybe(bun.webcore.Blob.Store.Bytes) {
    if (comptime !bun.Environment.isLinux) {
        unreachable;
    }

    var label_buf: [128]u8 = undefined;
    const label = std.fmt.bufPrintZ(&label_buf, "memfd-num-{d}", .{memfd_counter.fetchAdd(1, .monotonic)}) catch "";

    // Using huge pages was slower.
    const fd = switch (bun.sys.memfd_create(label, std.os.linux.MFD.CLOEXEC)) {
        .err => |err| return .{ .err = bun.sys.Error.fromCode(err.getErrno(), .open) },
        .result => |fd| fd,
    };

    if (bytes.len > 0)
        // Hint at the size of the file
        _ = bun.sys.ftruncate(fd, @intCast(bytes.len));

    // Dump all the bytes in there
    var written: isize = 0;

    var remain = bytes;
    while (remain.len > 0) {
        switch (bun.sys.pwrite(fd, remain, written)) {
            .err => |err| {
                if (err.getErrno() == .AGAIN) {
                    continue;
                }

                bun.Output.debugWarn("Failed to write to memfd: {}", .{err});
                fd.close();
                return .{ .err = err };
            },
            .result => |result| {
                if (result == 0) {
                    bun.Output.debugWarn("Failed to write to memfd: EOF", .{});
                    fd.close();
                    return .{ .err = bun.sys.Error.fromCode(.NOMEM, .write) };
                }
                written += @intCast(result);
                remain = remain[result..];
            },
        }
    }

    var linux_memfd_allocator = Self.new(.{
        .fd = fd,
        .ref_count = .init(),
        .size = bytes.len,
    });

    switch (linux_memfd_allocator.alloc(bytes.len, 0, .{ .TYPE = .SHARED })) {
        .result => |res| {
            return .{ .result = res };
        },
        .err => |err| {
            linux_memfd_allocator.deref();
            return .{ .err = err };
        },
    }
}

const bun = @import("bun");
const std = @import("std");
@@ -1,54 +0,0 @@
//! Single allocation only.

const Self = @This();

array_list: std.ArrayListAligned(u8, @alignOf(std.c.max_align_t)),

fn alloc(ptr: *anyopaque, len: usize, alignment: std.mem.Alignment, _: usize) ?[*]u8 {
    bun.assert(alignment.toByteUnits() <= @alignOf(std.c.max_align_t));
    var self = bun.cast(*Self, ptr);
    self.array_list.items.len = 0;
    self.array_list.ensureTotalCapacity(len) catch return null;
    self.array_list.items.len = len;
    return self.array_list.items.ptr;
}

fn resize(_: *anyopaque, buf: []u8, _: std.mem.Alignment, new_len: usize, _: usize) bool {
    _ = new_len;
    _ = buf;
    @panic("not implemented");
}

fn free(
    _: *anyopaque,
    _: []u8,
    _: std.mem.Alignment,
    _: usize,
) void {}

pub fn reset(self: *Self) void {
    self.array_list.items.len = 0;
}

pub fn deinit(self: *Self) void {
    self.array_list.deinit();
}

const vtable = std.mem.Allocator.VTable{
    .alloc = &alloc,
    .free = &free,
    .resize = &resize,
    .remap = &std.mem.Allocator.noRemap,
};

pub fn init(self: *Self, allocator: std.mem.Allocator) std.mem.Allocator {
    self.array_list = .init(allocator);

    return std.mem.Allocator{
        .ptr = self,
        .vtable = &vtable,
    };
}

const bun = @import("bun");
const std = @import("std");
@@ -1,5 +1,4 @@
const MemoryReportingAllocator = @This();

const log = bun.Output.scoped(.MEM, false);

child_allocator: std.mem.Allocator,
@@ -85,8 +84,7 @@ pub const VTable = std.mem.Allocator.VTable{
};

const std = @import("std");

const bun = @import("bun");
const jsc = bun.jsc;
const Environment = bun.Environment;
const Output = bun.Output;
const jsc = bun.jsc;
@@ -1,172 +0,0 @@
const Self = @This();

heap: ?*mimalloc.Heap = null,

const log = bun.Output.scoped(.mimalloc, true);

/// Internally, mimalloc calls mi_heap_get_default()
/// to get the default heap.
/// It uses pthread_getspecific to do that.
/// We can save those extra calls if we just do it once in here
pub fn getThreadlocalDefault() Allocator {
    return Allocator{ .ptr = mimalloc.mi_heap_get_default(), .vtable = &c_allocator_vtable };
}

pub fn backingAllocator(self: Self) Allocator {
    var arena = Self{ .heap = self.heap.?.backing() };
    return arena.allocator();
}

pub fn allocator(self: Self) Allocator {
    @setRuntimeSafety(false);
    return Allocator{ .ptr = self.heap.?, .vtable = &c_allocator_vtable };
}

pub fn dumpThreadStats(self: *Self) void {
    _ = self;
    const dump_fn = struct {
        pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
            const text = bun.span(textZ);
            bun.Output.errorWriter().writeAll(text) catch {};
        }
    }.dump;
    mimalloc.mi_thread_stats_print_out(dump_fn, null);
    bun.Output.flush();
}

pub fn dumpStats(self: *Self) void {
    _ = self;
    const dump_fn = struct {
        pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
            const text = bun.span(textZ);
            bun.Output.errorWriter().writeAll(text) catch {};
        }
    }.dump;
    mimalloc.mi_stats_print_out(dump_fn, null);
    bun.Output.flush();
}

pub fn deinit(self: *Self) void {
    mimalloc.mi_heap_destroy(bun.take(&self.heap).?);
}
pub fn init() !Self {
    return .{ .heap = mimalloc.mi_heap_new() orelse return error.OutOfMemory };
}

pub fn gc(self: Self) void {
    mimalloc.mi_heap_collect(self.heap orelse return, false);
}

pub inline fn helpCatchMemoryIssues(self: Self) void {
    if (comptime FeatureFlags.help_catch_memory_issues) {
        self.gc();
        bun.mimalloc.mi_collect(false);
    }
}

pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
    return mimalloc.mi_heap_check_owned(self.heap.?, ptr);
}
pub const supports_posix_memalign = true;

fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: mem.Alignment) ?[*]u8 {
    log("Malloc: {d}\n", .{len});

    const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
        mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
    else
        mimalloc.mi_heap_malloc(heap, len);

    if (comptime Environment.isDebug) {
        const usable = mimalloc.mi_malloc_usable_size(ptr);
        if (usable < len) {
            std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
        }
    }

    return if (ptr) |p|
        @as([*]u8, @ptrCast(p))
    else
        null;
}

fn alignedAllocSize(ptr: [*]u8) usize {
    return mimalloc.mi_malloc_usable_size(ptr);
}

fn alloc(arena: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
    const self = bun.cast(*mimalloc.Heap, arena);

    return alignedAlloc(
        self,
        len,
        alignment,
    );
}

fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
    return mimalloc.mi_expand(buf.ptr, new_len) != null;
}

fn free(
    _: *anyopaque,
    buf: []u8,
    alignment: mem.Alignment,
    _: usize,
) void {
    // mi_free_size internally just asserts the size
    // so it's faster if we don't pass that value through
    // but its good to have that assertion
    if (comptime Environment.isDebug) {
        assert(mimalloc.mi_is_in_heap_region(buf.ptr));
        if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits()))
            mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
        else
            mimalloc.mi_free_size(buf.ptr, buf.len);
    } else {
        mimalloc.mi_free(buf.ptr);
    }
}

/// Attempt to expand or shrink memory, allowing relocation.
///
/// `memory.len` must equal the length requested from the most recent
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
/// equal the same value that was passed as the `alignment` parameter to
/// the original `alloc` call.
///
/// A non-`null` return value indicates the resize was successful. The
/// allocation may have same address, or may have been relocated. In either
/// case, the allocation now has size of `new_len`. A `null` return value
/// indicates that the resize would be equivalent to allocating new memory,
/// copying the bytes from the old memory, and then freeing the old memory.
/// In such case, it is more efficient for the caller to perform the copy.
///
/// `new_len` must be greater than zero.
///
/// `ret_addr` is optionally provided as the first return address of the
/// allocation call stack. If the value is `0` it means no return address
/// has been provided.
fn remap(self: *anyopaque, buf: []u8, alignment: mem.Alignment, new_len: usize, _: usize) ?[*]u8 {
    const aligned_size = alignment.toByteUnits();
    const value = mimalloc.mi_heap_realloc_aligned(@ptrCast(self), buf.ptr, new_len, aligned_size);
    return @ptrCast(value);
}

const c_allocator_vtable = Allocator.VTable{
    .alloc = &Self.alloc,
    .resize = &Self.resize,
    .remap = &Self.remap,
    .free = &Self.free,
};

const Environment = @import("../env.zig");
const FeatureFlags = @import("../feature_flags.zig");
const std = @import("std");

const bun = @import("bun");
const assert = bun.assert;
const mimalloc = bun.mimalloc;

const mem = std.mem;
const Allocator = mem.Allocator;
@@ -1,4 +1,6 @@
//! A nullable allocator the same size as `std.mem.Allocator`.
const std = @import("std");
const bun = @import("bun");

const NullableAllocator = @This();

@@ -44,6 +46,3 @@ comptime {
        @compileError("Expected the sizes to be the same.");
    }
}

const bun = @import("bun");
const std = @import("std");
src/allocators/linux_memfd_allocator.zig (new file, 187 lines)
@@ -0,0 +1,187 @@
const bun = @import("bun");
const std = @import("std");

/// When cloning large amounts of data potentially multiple times, we can
/// leverage copy-on-write memory to avoid actually copying the data. To do that
/// on Linux, we need to use a memfd, which is a Linux-specific feature.
///
/// The steps are roughly:
///
/// 1. Create a memfd
/// 2. Write the data to the memfd
/// 3. Map the memfd into memory
///
/// Then, to clone the data later, we can just call `mmap` again.
///
/// The big catch is that mmap(), memfd_create(), write() all have overhead. And
/// often we will re-use virtual memory within the process. This does not reuse
/// the virtual memory. So we should only really use this for large blobs of
/// data that we expect to be cloned multiple times. Such as Blob in FormData.
pub const LinuxMemFdAllocator = struct {
    const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{});
    pub const new = bun.TrivialNew(@This());
    pub const ref = RefCount.ref;
    pub const deref = RefCount.deref;

    ref_count: RefCount,
    fd: bun.FileDescriptor = .invalid,
    size: usize = 0,

    var memfd_counter = std.atomic.Value(usize).init(0);

    fn deinit(this: *LinuxMemFdAllocator) void {
        this.fd.close();
        bun.destroy(this);
    }

    pub fn allocator(this: *LinuxMemFdAllocator) std.mem.Allocator {
        return .{
            .ptr = this,
            .vtable = AllocatorInterface.VTable,
        };
    }

    pub fn from(allocator_: std.mem.Allocator) ?*LinuxMemFdAllocator {
        if (allocator_.vtable == AllocatorInterface.VTable) {
            return @alignCast(@ptrCast(allocator_.ptr));
        }

        return null;
    }

    const AllocatorInterface = struct {
        fn alloc(_: *anyopaque, _: usize, _: std.mem.Alignment, _: usize) ?[*]u8 {
            // it should perform no allocations or resizes
            return null;
        }

        fn free(
            ptr: *anyopaque,
            buf: []u8,
            _: std.mem.Alignment,
            _: usize,
        ) void {
            var this: *LinuxMemFdAllocator = @alignCast(@ptrCast(ptr));
            defer this.deref();
            bun.sys.munmap(@alignCast(@ptrCast(buf))).unwrap() catch |err| {
                bun.Output.debugWarn("Failed to munmap memfd: {}", .{err});
            };
        }

        pub const VTable = &std.mem.Allocator.VTable{
            .alloc = &AllocatorInterface.alloc,
            .resize = &std.mem.Allocator.noResize,
            .remap = &std.mem.Allocator.noRemap,
            .free = &free,
        };
    };

    pub fn alloc(this: *LinuxMemFdAllocator, len: usize, offset: usize, flags: std.posix.MAP) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) {
        var size = len;

        // size rounded up to nearest page
        size = std.mem.alignForward(usize, size, std.heap.pageSize());

        var flags_mut = flags;
        flags_mut.TYPE = .SHARED;

        switch (bun.sys.mmap(
            null,
            @min(size, this.size),
            std.posix.PROT.READ | std.posix.PROT.WRITE,
            flags_mut,
            this.fd,
            offset,
        )) {
            .result => |slice| {
                return .{
                    .result = bun.webcore.Blob.Store.Bytes{
                        .cap = @truncate(slice.len),
                        .ptr = slice.ptr,
                        .len = @truncate(len),
                        .allocator = this.allocator(),
                    },
                };
            },
            .err => |errno| {
                return .{ .err = errno };
            },
        }
    }

    pub fn shouldUse(bytes: []const u8) bool {
        if (comptime !bun.Environment.isLinux) {
            return false;
        }

        if (bun.JSC.VirtualMachine.is_smol_mode) {
            return bytes.len >= 1024 * 1024 * 1;
        }

        // This is a net 2x - 4x slowdown to new Blob([huge])
        // so we must be careful
        return bytes.len >= 1024 * 1024 * 8;
    }

    pub fn create(bytes: []const u8) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) {
        if (comptime !bun.Environment.isLinux) {
            unreachable;
        }

        var label_buf: [128]u8 = undefined;
        const label = std.fmt.bufPrintZ(&label_buf, "memfd-num-{d}", .{memfd_counter.fetchAdd(1, .monotonic)}) catch "";

        // Using huge pages was slower.
        const fd = switch (bun.sys.memfd_create(label, std.os.linux.MFD.CLOEXEC)) {
            .err => |err| return .{ .err = bun.sys.Error.fromCode(err.getErrno(), .open) },
            .result => |fd| fd,
        };

        if (bytes.len > 0)
            // Hint at the size of the file
            _ = bun.sys.ftruncate(fd, @intCast(bytes.len));

        // Dump all the bytes in there
        var written: isize = 0;

        var remain = bytes;
        while (remain.len > 0) {
            switch (bun.sys.pwrite(fd, remain, written)) {
                .err => |err| {
                    if (err.getErrno() == .AGAIN) {
                        continue;
                    }

                    bun.Output.debugWarn("Failed to write to memfd: {}", .{err});
                    fd.close();
                    return .{ .err = err };
                },
                .result => |result| {
                    if (result == 0) {
                        bun.Output.debugWarn("Failed to write to memfd: EOF", .{});
                        fd.close();
                        return .{ .err = bun.sys.Error.fromCode(.NOMEM, .write) };
                    }
                    written += @intCast(result);
                    remain = remain[result..];
                },
            }
        }

        var linux_memfd_allocator = LinuxMemFdAllocator.new(.{
            .fd = fd,
            .ref_count = .init(),
            .size = bytes.len,
        });

        switch (linux_memfd_allocator.alloc(bytes.len, 0, .{ .TYPE = .SHARED })) {
            .result => |res| {
                return .{ .result = res };
            },
            .err => |err| {
                linux_memfd_allocator.deref();
                return .{ .err = err };
            },
        }
    }
};
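The doc comment above describes the copy-on-write trick this allocator relies on. As a rough standalone sketch of the same three steps (Linux-only, using plain Zig std rather than Bun's `bun.sys` wrappers; names here are illustrative):

```zig
const std = @import("std");

pub fn main() !void {
    const data = "large blob we expect to clone several times";

    // 1. Create a memfd: an anonymous file that lives in memory.
    const fd = try std.posix.memfd_create("cow-sketch", std.os.linux.MFD.CLOEXEC);
    defer std.posix.close(fd);

    // 2. Write the data into it once.
    try std.posix.ftruncate(fd, data.len);
    _ = try std.posix.pwrite(fd, data, 0);

    // 3. Map it. Every additional MAP_SHARED mapping of the same fd
    //    shares the physical pages instead of copying the bytes.
    const len = std.mem.alignForward(usize, data.len, std.heap.pageSize());
    const clone1 = try std.posix.mmap(null, len, std.posix.PROT.READ, .{ .TYPE = .SHARED }, fd, 0);
    defer std.posix.munmap(clone1);
    const clone2 = try std.posix.mmap(null, len, std.posix.PROT.READ, .{ .TYPE = .SHARED }, fd, 0);
    defer std.posix.munmap(clone2);

    std.debug.print("{s}\n{s}\n", .{ clone1[0..data.len], clone2[0..data.len] });
}
```

As the comment warns, each `mmap`/`memfd_create`/`write` call has fixed overhead, which is why the real code gates this path behind `shouldUse` and only takes it for blobs of at least 1-8 MiB.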
src/allocators/max_heap_allocator.zig (new file, 53 lines)
@@ -0,0 +1,53 @@
const bun = @import("bun");
const std = @import("std");

/// Single allocation only.
///
pub const MaxHeapAllocator = struct {
    array_list: std.ArrayListAligned(u8, @alignOf(std.c.max_align_t)),

    fn alloc(ptr: *anyopaque, len: usize, alignment: std.mem.Alignment, _: usize) ?[*]u8 {
        bun.assert(alignment.toByteUnits() <= @alignOf(std.c.max_align_t));
        var this = bun.cast(*MaxHeapAllocator, ptr);
        this.array_list.items.len = 0;
        this.array_list.ensureTotalCapacity(len) catch return null;
        this.array_list.items.len = len;
        return this.array_list.items.ptr;
    }

    fn resize(_: *anyopaque, buf: []u8, _: std.mem.Alignment, new_len: usize, _: usize) bool {
        _ = new_len;
        _ = buf;
        @panic("not implemented");
    }

    fn free(
        _: *anyopaque,
        _: []u8,
        _: std.mem.Alignment,
        _: usize,
    ) void {}

    pub fn reset(this: *MaxHeapAllocator) void {
        this.array_list.items.len = 0;
    }

    pub fn deinit(this: *MaxHeapAllocator) void {
        this.array_list.deinit();
    }

    const vtable = std.mem.Allocator.VTable{
        .alloc = &alloc,
        .free = &free,
        .resize = &resize,
        .remap = &std.mem.Allocator.noRemap,
    };
    pub fn init(this: *MaxHeapAllocator, allocator: std.mem.Allocator) std.mem.Allocator {
        this.array_list = .init(allocator);

        return std.mem.Allocator{
            .ptr = this,
            .vtable = &vtable,
        };
    }
};
@@ -1,4 +1,11 @@
const mem = @import("std").mem;
const std = @import("std");
const bun = @import("bun");
const log = bun.Output.scoped(.mimalloc, true);
const assert = bun.assert;
const Allocator = mem.Allocator;
const mimalloc = @import("./mimalloc.zig");
const Environment = @import("../env.zig");

fn mimalloc_free(
    _: *anyopaque,
@@ -143,13 +150,3 @@ const z_allocator_vtable = Allocator.VTable{
    .remap = &std.mem.Allocator.noRemap,
    .free = &ZAllocator.free_with_z_allocator,
};

const Environment = @import("../env.zig");
const std = @import("std");

const bun = @import("bun");
const assert = bun.assert;
const mimalloc = bun.mimalloc;

const mem = @import("std").mem;
const Allocator = mem.Allocator;
@@ -202,6 +202,7 @@ pub const MI_SMALL_WSIZE_MAX = @as(c_int, 128);
pub const MI_SMALL_SIZE_MAX = MI_SMALL_WSIZE_MAX * @import("std").zig.c_translation.sizeof(?*anyopaque);
pub const MI_ALIGNMENT_MAX = (@as(c_int, 16) * @as(c_int, 1024)) * @as(c_ulong, 1024);

const std = @import("std");
pub fn canUseAlignedAlloc(len: usize, alignment: usize) bool {
    return alignment > 0 and std.math.isPowerOfTwo(alignment) and !mi_malloc_satisfies_alignment(alignment, len);
}
@@ -210,5 +211,3 @@ inline fn mi_malloc_satisfies_alignment(alignment: usize, size: usize) bool {
    return (alignment == @sizeOf(*anyopaque) or
        (alignment == MI_MAX_ALIGN_SIZE and size >= (MI_MAX_ALIGN_SIZE / 2)));
}

const std = @import("std");
src/allocators/mimalloc_arena.zig (new file, 169 lines)
@@ -0,0 +1,169 @@
const mem = @import("std").mem;
|
||||
const std = @import("std");
|
||||
|
||||
const mimalloc = @import("./mimalloc.zig");
|
||||
const Environment = @import("../env.zig");
|
||||
const FeatureFlags = @import("../feature_flags.zig");
|
||||
const Allocator = mem.Allocator;
|
||||
const assert = bun.assert;
|
||||
const bun = @import("bun");
|
||||
const log = bun.Output.scoped(.mimalloc, true);
|
||||
|
||||
pub const Arena = struct {
|
||||
heap: ?*mimalloc.Heap = null,
|
||||
|
||||
/// Internally, mimalloc calls mi_heap_get_default()
|
||||
/// to get the default heap.
|
||||
/// It uses pthread_getspecific to do that.
|
||||
/// We can save those extra calls if we just do it once in here
|
||||
pub fn getThreadlocalDefault() Allocator {
|
||||
return Allocator{ .ptr = mimalloc.mi_heap_get_default(), .vtable = &c_allocator_vtable };
|
||||
}
|
||||
|
||||
pub fn backingAllocator(this: Arena) Allocator {
|
||||
var arena = Arena{ .heap = this.heap.?.backing() };
|
||||
return arena.allocator();
|
||||
}
|
||||
|
||||
pub fn allocator(this: Arena) Allocator {
|
||||
@setRuntimeSafety(false);
|
||||
return Allocator{ .ptr = this.heap.?, .vtable = &c_allocator_vtable };
|
||||
}
|
||||
|
||||
pub fn dumpThreadStats(_: *Arena) void {
|
||||
const dump_fn = struct {
|
||||
pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
|
||||
const text = bun.span(textZ);
|
||||
bun.Output.errorWriter().writeAll(text) catch {};
|
||||
}
|
||||
}.dump;
|
||||
mimalloc.mi_thread_stats_print_out(dump_fn, null);
|
||||
bun.Output.flush();
|
||||
}
|
||||
|
||||
pub fn dumpStats(_: *Arena) void {
|
||||
const dump_fn = struct {
|
||||
pub fn dump(textZ: [*:0]const u8, _: ?*anyopaque) callconv(.C) void {
|
||||
const text = bun.span(textZ);
|
||||
bun.Output.errorWriter().writeAll(text) catch {};
|
||||
}
|
||||
}.dump;
|
||||
mimalloc.mi_stats_print_out(dump_fn, null);
|
||||
bun.Output.flush();
|
||||
}
|
||||
|
||||
pub fn deinit(this: *Arena) void {
|
||||
mimalloc.mi_heap_destroy(bun.take(&this.heap).?);
|
||||
}
|
||||
pub fn init() !Arena {
|
||||
const arena = Arena{ .heap = mimalloc.mi_heap_new() orelse return error.OutOfMemory };
|
||||
return arena;
|
||||
}
|
||||
|
||||
pub fn gc(this: Arena) void {
|
||||
mimalloc.mi_heap_collect(this.heap orelse return, false);
|
||||
}
|
||||
|
||||
pub inline fn helpCatchMemoryIssues(this: Arena) void {
|
||||
if (comptime FeatureFlags.help_catch_memory_issues) {
|
||||
this.gc();
|
||||
bun.Mimalloc.mi_collect(false);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ownsPtr(this: Arena, ptr: *const anyopaque) bool {
|
||||
return mimalloc.mi_heap_check_owned(this.heap.?, ptr);
|
||||
}
|
||||
pub const supports_posix_memalign = true;
|
||||
|
||||
fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: mem.Alignment) ?[*]u8 {
|
||||
log("Malloc: {d}\n", .{len});
|
||||
|
||||
const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment.toByteUnits()))
|
||||
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
|
||||
else
|
||||
mimalloc.mi_heap_malloc(heap, len);
|
||||
|
||||
if (comptime Environment.isDebug) {
|
||||
const usable = mimalloc.mi_malloc_usable_size(ptr);
|
||||
if (usable < len) {
|
||||
std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
|
||||
}
|
||||
}
|
||||
|
||||
return if (ptr) |p|
|
||||
@as([*]u8, @ptrCast(p))
|
||||
else
|
||||
null;
|
||||
}
|
||||
|
||||
fn alignedAllocSize(ptr: [*]u8) usize {
|
||||
return mimalloc.mi_malloc_usable_size(ptr);
|
||||
}
|
||||
|
||||
fn alloc(arena: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
|
||||
const this = bun.cast(*mimalloc.Heap, arena);
|
||||
|
||||
return alignedAlloc(
|
||||
this,
|
||||
len,
|
||||
alignment,
|
||||
);
|
||||
}
|
||||
|
||||
fn resize(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
|
||||
return mimalloc.mi_expand(buf.ptr, new_len) != null;
|
||||
}
|
||||
|
||||
fn free(
|
||||
_: *anyopaque,
|
||||
buf: []u8,
|
||||
alignment: mem.Alignment,
|
||||
_: usize,
|
||||
) void {
|
||||
// mi_free_size internally just asserts the size
|
||||
// so it's faster if we don't pass that value through
|
||||
// but its good to have that assertion
|
||||
if (comptime Environment.isDebug) {
|
||||
assert(mimalloc.mi_is_in_heap_region(buf.ptr));
|
||||
if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits()))
|
||||
mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
|
||||
else
|
||||
mimalloc.mi_free_size(buf.ptr, buf.len);
|
||||
} else {
|
||||
mimalloc.mi_free(buf.ptr);
|
||||
}
|
||||
}
|
||||
|
||||
/// Attempt to expand or shrink memory, allowing relocation.
|
||||
///
|
||||
/// `memory.len` must equal the length requested from the most recent
|
||||
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
|
||||
/// equal the same value that was passed as the `alignment` parameter to
|
||||
/// the original `alloc` call.
|
||||
///
|
||||
/// A non-`null` return value indicates the resize was successful. The
|
||||
/// allocation may have same address, or may have been relocated. In either
|
||||
/// case, the allocation now has size of `new_len`. A `null` return value
|
||||
/// indicates that the resize would be equivalent to allocating new memory,
|
||||
/// copying the bytes from the old memory, and then freeing the old memory.
|
||||
/// In such case, it is more efficient for the caller to perform the copy.
|
||||
///
|
||||
/// `new_len` must be greater than zero.
|
||||
///
|
||||
/// `ret_addr` is optionally provided as the first return address of the
|
||||
/// allocation call stack. If the value is `0` it means no return address
|
||||
/// has been provided.
|
||||
fn remap(this: *anyopaque, buf: []u8, alignment: mem.Alignment, new_len: usize, _: usize) ?[*]u8 {
|
||||
const aligned_size = alignment.toByteUnits();
|
||||
const value = mimalloc.mi_heap_realloc_aligned(@ptrCast(this), buf.ptr, new_len, aligned_size);
|
||||
return @ptrCast(value);
|
||||
}
|
||||
};
|
||||
|
||||
const c_allocator_vtable = Allocator.VTable{
|
||||
.alloc = &Arena.alloc,
|
||||
.resize = &Arena.resize,
|
||||
.remap = &Arena.remap,
|
||||
.free = &Arena.free,
|
||||
};
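
For illustration only (not part of this diff): a minimal sketch of how the new Arena is meant to be used, based on the API above. The import path is hypothetical; inside the Bun tree the type lives in src/allocators/mimalloc_arena.zig and depends on the `bun` module.

const std = @import("std");
const Arena = @import("./allocators/mimalloc_arena.zig").Arena; // hypothetical path

fn example() !void {
    // Each Arena owns a private mimalloc heap; deinit() destroys the
    // heap and releases everything allocated from it at once.
    var arena = try Arena.init();
    defer arena.deinit();

    const allocator = arena.allocator();
    const buf = try allocator.alloc(u8, 128);
    defer allocator.free(buf);

    std.debug.assert(arena.ownsPtr(buf.ptr));

    // No-op unless FeatureFlags.help_catch_memory_issues is set; then it
    // forces a collection to surface bad pointers earlier.
    arena.helpCatchMemoryIssues();
}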
@@ -1,3 +1,5 @@
+const std = @import("std");
+
pub const Reader = struct {
    const Self = @This();
    pub const ReadError = error{EOF};
@@ -516,5 +518,3 @@ pub const analytics = struct {
        }
    };
};
-
-const std = @import("std");
@@ -1,3 +1,11 @@
+const bun = @import("bun");
+const Environment = bun.Environment;
+
+const std = @import("std");
+
+const Analytics = @import("./analytics_schema.zig").analytics;
+const Semver = bun.Semver;
+
/// Enables analytics. This is used by:
/// - crash_handler.zig's `report` function to anonymously report crashes
///
@@ -254,7 +262,7 @@ pub const EventName = enum(u8) {

var random: std.rand.DefaultPrng = undefined;

-const platform_arch = if (Environment.isAarch64) analytics.Architecture.arm else analytics.Architecture.x64;
+const platform_arch = if (Environment.isAarch64) Analytics.Architecture.arm else Analytics.Architecture.x64;

// TODO: move this code somewhere more appropriate, and remove it from "analytics"
// The following code is not currently even used for analytics, just feature-detection
@@ -262,10 +270,10 @@ const platform_arch = if (Environment.isAarch64) analytics.Architecture.arm else
pub const GenerateHeader = struct {
    pub const GeneratePlatform = struct {
        var osversion_name: [32]u8 = undefined;
-        fn forMac() analytics.Platform {
+        fn forMac() Analytics.Platform {
            @memset(&osversion_name, 0);

-            var platform = analytics.Platform{ .os = analytics.OperatingSystem.macos, .version = &[_]u8{}, .arch = platform_arch };
+            var platform = Analytics.Platform{ .os = Analytics.OperatingSystem.macos, .version = &[_]u8{}, .arch = platform_arch };
            var len = osversion_name.len - 1;
            // this previously used "kern.osrelease", which was the darwin xnu kernel version
            // That is less useful than "kern.osproductversion", which is the macOS version
@@ -276,8 +284,8 @@ pub const GenerateHeader = struct {
        }

        pub var linux_os_name: std.c.utsname = undefined;
-        var platform_: analytics.Platform = undefined;
-        pub const Platform = analytics.Platform;
+        var platform_: Analytics.Platform = undefined;
+        pub const Platform = Analytics.Platform;
        var linux_kernel_version: Semver.Version = undefined;
        var run_once = std.once(struct {
            fn run() void {
@@ -292,7 +300,7 @@ pub const GenerateHeader = struct {
                    linux_kernel_version = result.version.min();
                } else if (Environment.isWindows) {
                    platform_ = Platform{
-                        .os = analytics.OperatingSystem.windows,
+                        .os = Analytics.OperatingSystem.windows,
                        .version = &[_]u8{},
                        .arch = platform_arch,
                    };
@@ -300,7 +308,7 @@ pub const GenerateHeader = struct {
            }
        }.run);

-        pub fn forOS() analytics.Platform {
+        pub fn forOS() Analytics.Platform {
            run_once.call();
            return platform_;
        }
@@ -350,7 +358,7 @@ pub const GenerateHeader = struct {
            };
        }

-        fn forLinux() analytics.Platform {
+        fn forLinux() Analytics.Platform {
            linux_os_name = std.mem.zeroes(@TypeOf(linux_os_name));

            _ = std.c.uname(&linux_os_name);
@@ -360,17 +368,10 @@ pub const GenerateHeader = struct {

            // Linux DESKTOP-P4LCIEM 5.10.16.3-microsoft-standard-WSL2 #1 SMP Fri Apr 2 22:23:49 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux
            if (std.mem.indexOf(u8, release, "microsoft") != null) {
-                return analytics.Platform{ .os = analytics.OperatingSystem.wsl, .version = release, .arch = platform_arch };
+                return Analytics.Platform{ .os = Analytics.OperatingSystem.wsl, .version = release, .arch = platform_arch };
            }

-            return analytics.Platform{ .os = analytics.OperatingSystem.linux, .version = release, .arch = platform_arch };
+            return Analytics.Platform{ .os = Analytics.OperatingSystem.linux, .version = release, .arch = platform_arch };
        }
    };
};

-const std = @import("std");
-const analytics = @import("./analytics/schema.zig").analytics;
-
-const bun = @import("bun");
-const Environment = bun.Environment;
-const Semver = bun.Semver;
@@ -1,4 +1,4 @@
-package api;
+package Api;

smol Loader {
  jsx = 1;
@@ -64,7 +64,7 @@ struct StackTrace {
message JSException {
  string name = 1;
  string message = 2;
-
+
  uint16 runtime_type = 3;
  uint8 code = 4;

@@ -103,7 +103,7 @@ message FallbackMessageContainer {
  Problems problems = 4;
  string cwd = 5;
}

-
+
smol ResolveMode {
  disable = 1;
@@ -178,18 +178,18 @@ struct JavascriptBundle {
  // These are sorted alphabetically so you can do binary search
  JavascriptBundledModule[] modules;
  JavascriptBundledPackage[] packages;
-
+
  // This is ASCII-encoded so you can send it directly over HTTP
  byte[] etag;

  uint32 generated_at;
-
+
  // generated by hashing all ${name}@${version} in sorted order
  byte[] app_package_json_dependencies_hash;

  byte[] import_from_name;

-  // This is what StringPointer refers to
+  // This is what StringPointer refers to
  byte[] manifest_string;
}

@@ -359,7 +359,7 @@ smol SourceMapMode {
struct FileHandle {
  string path;
  uint size;
-  uint fd;
+  uint fd;
}

message Transform {
@@ -462,7 +462,7 @@ struct Log {

smol Reloader {
  disable = 1;
-  // equivalent of CMD + R
+  // equivalent of CMD + R
  live = 2;
  // React Fast Refresh
  fast_refresh = 3;
@@ -534,7 +534,7 @@ struct WebsocketMessageBuildSuccess {

  Loader loader;
  string module_path;
-
+
  // This is the length of the blob that immediately follows this message.
  uint32 blob_length;
}
@@ -630,4 +630,4 @@ struct TestResponseItem {
struct GetTestsResponse {
  TestResponseItem[] tests;
  byte[] contents;
-}
+}
@@ -1,3 +1,8 @@
+const std = @import("std");
+const bun = @import("bun");
+const js_ast = bun.JSAst;
+const OOM = bun.OOM;
+
pub const Reader = struct {
    const Self = @This();
    pub const ReadError = error{EOF};
@@ -320,7 +325,7 @@ pub fn Writer(comptime WritableStream: type) type {
pub const ByteWriter = Writer(*std.io.FixedBufferStream([]u8));
pub const FileWriter = Writer(std.fs.File);

-pub const api = struct {
+pub const Api = struct {
    pub const Loader = enum(u8) {
        _none,
        jsx,
@@ -421,7 +426,7 @@ pub const api = struct {
        }
    };

-    pub const StackFramePosition = bun.jsc.ZigStackFramePosition;
+    pub const StackFramePosition = bun.JSC.ZigStackFramePosition;

    pub const SourceLine = struct {
        /// line
@@ -1951,27 +1956,6 @@ pub const api = struct {

        _,

-        pub fn fromJS(global: *bun.jsc.JSGlobalObject, value: bun.jsc.JSValue) bun.JSError!?SourceMapMode {
-            if (value.isString()) {
-                const str = try value.toSliceOrNull(global);
-                defer str.deinit();
-                const utf8 = str.slice();
-                if (bun.strings.eqlComptime(utf8, "none")) {
-                    return .none;
-                }
-                if (bun.strings.eqlComptime(utf8, "inline")) {
-                    return .@"inline";
-                }
-                if (bun.strings.eqlComptime(utf8, "external")) {
-                    return .external;
-                }
-                if (bun.strings.eqlComptime(utf8, "linked")) {
-                    return .linked;
-                }
-            }
-            return null;
-        }
-
        pub fn jsonStringify(self: @This(), writer: anytype) !void {
            return try writer.write(@tagName(self));
        }
@@ -2875,13 +2859,13 @@ pub const api = struct {
            }
        }

-        pub fn parseRegistryURLString(this: *Parser, str: *js_ast.E.String) OOM!api.NpmRegistry {
+        pub fn parseRegistryURLString(this: *Parser, str: *js_ast.E.String) OOM!Api.NpmRegistry {
            return try this.parseRegistryURLStringImpl(str.data);
        }

-        pub fn parseRegistryURLStringImpl(this: *Parser, str: []const u8) OOM!api.NpmRegistry {
+        pub fn parseRegistryURLStringImpl(this: *Parser, str: []const u8) OOM!Api.NpmRegistry {
            const url = bun.URL.parse(str);
-            var registry = std.mem.zeroes(api.NpmRegistry);
+            var registry = std.mem.zeroes(Api.NpmRegistry);

            // Token
            if (url.username.len == 0 and url.password.len > 0) {
@@ -2900,8 +2884,8 @@ pub const api = struct {
            return registry;
        }

-        fn parseRegistryObject(this: *Parser, obj: *js_ast.E.Object) !api.NpmRegistry {
-            var registry = std.mem.zeroes(api.NpmRegistry);
+        fn parseRegistryObject(this: *Parser, obj: *js_ast.E.Object) !Api.NpmRegistry {
+            var registry = std.mem.zeroes(Api.NpmRegistry);

            if (obj.get("url")) |url| {
                try this.expectString(url);
@@ -2928,7 +2912,7 @@ pub const api = struct {
            return registry;
        }

-        pub fn parseRegistry(this: *Parser, expr: js_ast.Expr) !api.NpmRegistry {
+        pub fn parseRegistry(this: *Parser, expr: js_ast.Expr) !Api.NpmRegistry {
            switch (expr.data) {
                .e_string => |str| {
                    return this.parseRegistryURLString(str);
@@ -2938,7 +2922,7 @@ pub const api = struct {
                },
                else => {
                    try this.addError(expr.loc, "Expected registry to be a URL string or an object");
-                    return std.mem.zeroes(api.NpmRegistry);
+                    return std.mem.zeroes(Api.NpmRegistry);
                },
            }
        }
@@ -3365,9 +3349,3 @@ pub const api = struct {
        }
    };
};

-const std = @import("std");
-
-const bun = @import("bun");
-const OOM = bun.OOM;
-const js_ast = bun.ast;
@@ -85,10 +85,12 @@ pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) void {
    this.bump_allocator = this.stack_allocator.get();
}

+// @sortImports
+
const bun = @import("bun");
const std = @import("std");

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const Expr = js_ast.Expr;
const Stmt = js_ast.Stmt;
@@ -112,9 +112,7 @@ pub fn deinit(this: *Ast) void {
    if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}

-pub const Class = G.Class;
-
-const string = []const u8;
+// @sortImports

const std = @import("std");
const Runtime = @import("../runtime.zig").Runtime;
@@ -123,13 +121,13 @@ const bun = @import("bun");
const BabyList = bun.BabyList;
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const Ast = js_ast.Ast;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Expr = js_ast.Expr;
const G = js_ast.G;
const InlinedEnumValue = js_ast.InlinedEnumValue;
const LocRef = js_ast.LocRef;
const NamedExport = js_ast.NamedExport;
@@ -140,3 +138,6 @@ const RefHashCtx = js_ast.RefHashCtx;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
+
+const G = js_ast.G;
+pub const Class = G.Class;
@@ -87,18 +87,20 @@ pub const B = union(Binding.Tag) {
    }
};

-pub const Class = G.Class;
+// @sortImports

const std = @import("std");

const bun = @import("bun");
const writeAnyToHasher = bun.writeAnyToHasher;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const ArrayBinding = js_ast.ArrayBinding;
const Binding = js_ast.Binding;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const Flags = js_ast.Flags;
-const G = js_ast.G;
const Ref = js_ast.Ref;
+
+const G = js_ast.G;
+pub const Class = G.Class;
@@ -147,14 +147,15 @@ pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding {
    }
}

-const string = []const u8;
+// @sortImports

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;
+const string = bun.string;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
@@ -201,24 +201,18 @@ pub fn addUrlForCss(
    }
}

-pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
-pub const ConstValuesMap = Ast.ConstValuesMap;
-pub const NamedExports = Ast.NamedExports;
-pub const NamedImports = Ast.NamedImports;
-pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;
-
-const string = []const u8;
+// @sortImports

const std = @import("std");

const bun = @import("bun");
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
+const string = bun.string;
const strings = bun.strings;
const MimeType = bun.http.MimeType;

-const js_ast = bun.ast;
-const Ast = js_ast.Ast;
+const js_ast = bun.js_ast;
const BundledAst = js_ast.BundledAst;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
@@ -228,3 +222,10 @@ const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
const TlaCheck = js_ast.TlaCheck;
+
+const Ast = js_ast.Ast;
+pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
+pub const ConstValuesMap = Ast.ConstValuesMap;
+pub const NamedExports = Ast.NamedExports;
+pub const NamedImports = Ast.NamedImports;
+pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;
@@ -124,14 +124,16 @@ pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier {
    return minifier;
}

-pub const Class = G.Class;
+// @sortImports

-const string = []const u8;
+const std = @import("std");

-const bun = @import("bun");
-const std = @import("std");
-
-const js_ast = bun.ast;
+const bun = @import("bun");
+const string = bun.string;
+
+const js_ast = bun.js_ast;
const CharFreq = js_ast.CharFreq;
const G = js_ast.G;
const NameMinifier = js_ast.NameMinifier;

+const G = js_ast.G;
+pub const Class = G.Class;
@@ -66,9 +66,9 @@ pub const Array = struct {
        return ExprNodeList.init(out[0 .. out.len - remain.len]);
    }

-    pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
+    pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
        const items = this.items.slice();
-        var array = try jsc.JSValue.createEmptyArray(globalObject, items.len);
+        var array = try JSC.JSValue.createEmptyArray(globalObject, items.len);
        array.protect();
        defer array.unprotect();
        for (items, 0..) |expr, j| {
@@ -108,8 +108,8 @@ pub const Binary = struct {

pub const Boolean = struct {
    value: bool,
-    pub fn toJS(this: @This(), ctx: *jsc.JSGlobalObject) jsc.C.JSValueRef {
-        return jsc.C.JSValueMakeBoolean(ctx, this.value);
+    pub fn toJS(this: @This(), ctx: *JSC.JSGlobalObject) JSC.C.JSValueRef {
+        return JSC.C.JSValueMakeBoolean(ctx, this.value);
    }
};
pub const Super = struct {};
@@ -466,8 +466,8 @@ pub const Number = struct {
        return try writer.write(self.value);
    }

-    pub fn toJS(this: @This()) jsc.JSValue {
-        return jsc.JSValue.jsNumber(this.value);
+    pub fn toJS(this: @This()) JSC.JSValue {
+        return JSC.JSValue.jsNumber(this.value);
    }
};

@@ -480,9 +480,9 @@ pub const BigInt = struct {
        return try writer.write(self.value);
    }

-    pub fn toJS(_: @This()) jsc.JSValue {
+    pub fn toJS(_: @This()) JSC.JSValue {
        // TODO:
-        return jsc.JSValue.jsNumber(0);
+        return JSC.JSValue.jsNumber(0);
    }
};

@@ -515,8 +515,8 @@ pub const Object = struct {
        return if (asProperty(self, key)) |query| query.expr else @as(?Expr, null);
    }

-    pub fn toJS(this: *Object, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
-        var obj = jsc.JSValue.createEmptyObject(globalObject, this.properties.len);
+    pub fn toJS(this: *Object, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
+        var obj = JSC.JSValue.createEmptyObject(globalObject, this.properties.len);
        obj.protect();
        defer obj.unprotect();
        const props: []const G.Property = this.properties.slice();
@@ -955,7 +955,7 @@ pub const String = struct {
        };
    }

-    pub fn cloneSliceIfNecessary(str: *const String, allocator: std.mem.Allocator) ![]const u8 {
+    pub fn cloneSliceIfNecessary(str: *const String, allocator: std.mem.Allocator) !bun.string {
        if (str.isUTF8()) {
            return allocator.dupe(u8, str.string(allocator) catch unreachable);
        }
@@ -1005,7 +1005,7 @@ pub const String = struct {
                return strings.utf16EqlString(other.slice16(), s.data);
            }
        },
-        []const u8 => {
+        bun.string => {
            return strings.eqlLong(s.data, other, true);
        },
        []u16, []const u16 => {
@@ -1024,7 +1024,7 @@ pub const String = struct {
                return std.mem.eql(u16, other.slice16(), s.slice16());
            }
        },
-        []const u8 => {
+        bun.string => {
            return strings.utf16EqlString(s.slice16(), other);
        },
        []u16, []const u16 => {
@@ -1055,7 +1055,7 @@ pub const String = struct {
            strings.eqlComptimeUTF16(s.slice16()[0..value.len], value);
    }

-    pub fn string(s: *const String, allocator: std.mem.Allocator) OOM![]const u8 {
+    pub fn string(s: *const String, allocator: std.mem.Allocator) OOM!bun.string {
        if (s.isUTF8()) {
            return s.data;
        } else {
@@ -1063,7 +1063,7 @@ pub const String = struct {
        }
    }

-    pub fn stringZ(s: *const String, allocator: std.mem.Allocator) OOM![:0]const u8 {
+    pub fn stringZ(s: *const String, allocator: std.mem.Allocator) OOM!bun.stringZ {
        if (s.isUTF8()) {
            return allocator.dupeZ(u8, s.data);
        } else {
@@ -1071,7 +1071,7 @@ pub const String = struct {
        }
    }

-    pub fn stringCloned(s: *const String, allocator: std.mem.Allocator) OOM![]const u8 {
+    pub fn stringCloned(s: *const String, allocator: std.mem.Allocator) OOM!bun.string {
        if (s.isUTF8()) {
            return allocator.dupe(u8, s.data);
        } else {
@@ -1091,7 +1091,7 @@ pub const String = struct {
        }
    }

-    pub fn toJS(s: *String, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) !jsc.JSValue {
+    pub fn toJS(s: *String, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) !JSC.JSValue {
        s.resolveRopeIfNeeded(allocator);
        if (!s.isPresent()) {
            var emp = bun.String.empty;
@@ -1115,11 +1115,11 @@ pub const String = struct {
        }
    }

-    pub fn toZigString(s: *String, allocator: std.mem.Allocator) jsc.ZigString {
+    pub fn toZigString(s: *String, allocator: std.mem.Allocator) JSC.ZigString {
        if (s.isUTF8()) {
-            return jsc.ZigString.fromUTF8(s.slice(allocator));
+            return JSC.ZigString.fromUTF8(s.slice(allocator));
        } else {
-            return jsc.ZigString.initUTF16(s.slice16());
+            return JSC.ZigString.initUTF16(s.slice16());
        }
    }
@@ -1416,10 +1416,7 @@ pub const Import = struct {
    }
};

-pub const Class = G.Class;
-
-const string = []const u8;
-const stringZ = [:0]const u8;
+// @sortImports

const std = @import("std");

@@ -1427,20 +1424,24 @@ const bun = @import("bun");
const ComptimeStringMap = bun.ComptimeStringMap;
const Environment = bun.Environment;
const ImportRecord = bun.ImportRecord;
+const JSC = bun.JSC;
const OOM = bun.OOM;
-const jsc = bun.jsc;
const logger = bun.logger;
+const string = bun.string;
+const stringZ = bun.stringZ;
const strings = bun.strings;
const Loader = bun.options.Loader;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;
const G = js_ast.G;
const Op = js_ast.Op;
const OptionalChain = js_ast.OptionalChain;
const Ref = js_ast.Ref;
const ToJSError = js_ast.ToJSError;

+const G = js_ast.G;
+pub const Class = G.Class;
@@ -64,7 +64,7 @@ pub fn unwrapInlined(expr: Expr) Expr {
}

pub fn fromBlob(
-    blob: *const jsc.WebCore.Blob,
+    blob: *const JSC.WebCore.Blob,
    allocator: std.mem.Allocator,
    mime_type_: ?MimeType,
    log: *logger.Log,
@@ -108,7 +108,7 @@ pub fn fromBlob(
    return Expr.init(
        E.String,
        E.String{
-            .data = try jsc.ZigString.init(bytes).toBase64DataURL(allocator),
+            .data = try JSC.ZigString.init(bytes).toBase64DataURL(allocator),
        },
        loc,
    );
@@ -147,7 +147,7 @@ pub fn hasAnyPropertyNamed(expr: *const Expr, comptime names: []const string) bool {
    return false;
}

-pub fn toJS(this: Expr, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
+pub fn toJS(this: Expr, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
    return this.data.toJS(allocator, globalObject);
}

@@ -3072,17 +3072,17 @@ pub const Data = union(Tag) {
        return Equality.unknown;
    }

-    pub fn toJS(this: Data, allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue {
+    pub fn toJS(this: Data, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue {
        return switch (this) {
            .e_array => |e| e.toJS(allocator, globalObject),
            .e_object => |e| e.toJS(allocator, globalObject),
            .e_string => |e| e.toJS(allocator, globalObject),
-            .e_null => jsc.JSValue.null,
+            .e_null => JSC.JSValue.null,
            .e_undefined => .js_undefined,
            .e_boolean => |boolean| if (boolean.value)
-                jsc.JSValue.true
+                JSC.JSValue.true
            else
-                jsc.JSValue.false,
+                JSC.JSValue.false,
            .e_number => |e| e.toJS(),
            // .e_big_int => |e| e.toJS(ctx, exception),

@@ -3097,7 +3097,7 @@ pub const Data = union(Tag) {
            // brk: {
            //     // var node = try allocator.create(Macro.JSNode);
            //     // node.* = Macro.JSNode.initExpr(Expr{ .data = this, .loc = logger.Loc.Empty });
-            //     // break :brk jsc.JSValue.c(Macro.JSNode.Class.make(globalObject, node));
+            //     // break :brk JSC.JSValue.c(Macro.JSNode.Class.make(globalObject, node));
            // },

            else => {
@@ -3201,16 +3201,17 @@ pub fn StoredData(tag: Tag) type {
    };
}

+extern fn JSC__jsToNumber(latin1_ptr: [*]const u8, len: usize) f64;
+
fn stringToEquivalentNumberValue(str: []const u8) f64 {
    // +"" -> 0
    if (str.len == 0) return 0;
    if (!bun.strings.isAllASCII(str))
        return std.math.nan(f64);
-    return bun.cpp.JSC__jsToNumber(str.ptr, str.len);
+    return JSC__jsToNumber(str.ptr, str.len);
}

-const string = []const u8;
-const stringZ = [:0]const u8;
+// @sortImports

const JSPrinter = @import("../js_printer.zig");
const std = @import("std");
@@ -3218,17 +3219,19 @@ const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
-const JSONParser = bun.json;
+const JSC = bun.JSC;
+const JSONParser = bun.JSON;
const MutableString = bun.MutableString;
const OOM = bun.OOM;
const default_allocator = bun.default_allocator;
-const jsc = bun.jsc;
const logger = bun.logger;
+const string = bun.string;
+const stringZ = bun.stringZ;
const strings = bun.strings;
const writeAnyToHasher = bun.writeAnyToHasher;
const MimeType = bun.http.MimeType;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const E = js_ast.E;
const Expr = js_ast.Expr;
@@ -208,16 +208,17 @@ pub const Arg = struct {
    }
};

-const string = []const u8;
+// @sortImports

const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const logger = bun.logger;
+const string = bun.string;
const TypeScript = bun.js_parser.TypeScript;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const BindingNodeIndex = js_ast.BindingNodeIndex;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
@@ -12,7 +12,7 @@ pub const MacroContext = struct {
    env: *DotEnv.Loader,
    macros: MacroMap,
    remap: MacroRemap,
-    javascript_object: jsc.JSValue = jsc.JSValue.zero,
+    javascript_object: JSC.JSValue = JSC.JSValue.zero,

    pub fn getRemap(this: MacroContext, path: string) ?MacroRemapEntry {
        if (this.remap.entries.len == 0) return null;
@@ -51,7 +51,7 @@ pub const MacroContext = struct {
        bun.assert(!isMacroPath(import_record_path_without_macro_prefix));

        const input_specifier = brk: {
-            if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| {
+            if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| {
                break :brk replacement.path;
            }

@@ -176,7 +176,7 @@ pub fn init(
    defer resolver.opts.transform_options = old_transform_options;

    // JSC needs to be initialized if building from CLI
-    jsc.initialize(false);
+    JSC.initialize(false);

    var _vm = try JavaScript.VirtualMachine.init(.{
        .allocator = default_allocator,
@@ -198,7 +198,7 @@ pub fn init(

    const loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash);

-    switch (loaded_result.unwrap(vm.jsc_vm, .leave_unhandled)) {
+    switch (loaded_result.unwrap(vm.jsc, .leave_unhandled)) {
        .rejected => |result| {
            vm.unhandledRejection(vm.global, result, loaded_result.asValue());
            vm.disableMacroMode();
@@ -214,17 +214,17 @@ pub fn init(
}

pub const Runner = struct {
-    const VisitMap = std.AutoHashMapUnmanaged(jsc.JSValue, Expr);
+    const VisitMap = std.AutoHashMapUnmanaged(JSC.JSValue, Expr);

    threadlocal var args_buf: [3]js.JSObjectRef = undefined;
-    threadlocal var exception_holder: jsc.ZigException.Holder = undefined;
+    threadlocal var exception_holder: JSC.ZigException.Holder = undefined;
    pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError;

    pub const Run = struct {
        caller: Expr,
        function_name: string,
        macro: *const Macro,
-        global: *jsc.JSGlobalObject,
+        global: *JSC.JSGlobalObject,
        allocator: std.mem.Allocator,
        id: i32,
        log: *logger.Log,
@@ -238,7 +238,7 @@ pub const Runner = struct {
            allocator: std.mem.Allocator,
            function_name: string,
            caller: Expr,
-            args: []jsc.JSValue,
+            args: []JSC.JSValue,
            source: *const logger.Source,
            id: i32,
        ) MacroError!Expr {
@@ -273,9 +273,9 @@ pub const Runner = struct {

        pub fn run(
            this: *Run,
-            value: jsc.JSValue,
+            value: JSC.JSValue,
        ) MacroError!Expr {
-            return switch ((try jsc.ConsoleObject.Formatter.Tag.get(value, this.global)).tag) {
+            return switch ((try JSC.ConsoleObject.Formatter.Tag.get(value, this.global)).tag) {
                .Error => this.coerce(value, .Error),
                .Undefined => this.coerce(value, .Undefined),
                .Null => this.coerce(value, .Null),
@@ -305,8 +305,8 @@ pub const Runner = struct {

        pub fn coerce(
            this: *Run,
-            value: jsc.JSValue,
-            comptime tag: jsc.ConsoleObject.Formatter.Tag,
+            value: JSC.JSValue,
+            comptime tag: JSC.ConsoleObject.Formatter.Tag,
        ) MacroError!Expr {
            switch (comptime tag) {
                .Error => {
@@ -325,15 +325,15 @@ pub const Runner = struct {
                return _entry.value_ptr.*;
            }

-            var blob_: ?jsc.WebCore.Blob = null;
+            var blob_: ?JSC.WebCore.Blob = null;
            const mime_type: ?MimeType = null;

            if (value.jsType() == .DOMWrapper) {
-                if (value.as(jsc.WebCore.Response)) |resp| {
+                if (value.as(JSC.WebCore.Response)) |resp| {
                    return this.run(try resp.getBlobWithoutCallFrame(this.global));
-                } else if (value.as(jsc.WebCore.Request)) |resp| {
+                } else if (value.as(JSC.WebCore.Request)) |resp| {
                    return this.run(try resp.getBlobWithoutCallFrame(this.global));
-                } else if (value.as(jsc.WebCore.Blob)) |resp| {
+                } else if (value.as(JSC.WebCore.Blob)) |resp| {
                    blob_ = resp.*;
                    blob_.?.allocator = null;
                } else if (value.as(bun.api.ResolveMessage) != null or value.as(bun.api.BuildMessage) != null) {
@@ -366,7 +366,7 @@ pub const Runner = struct {
                .Boolean => {
                    return Expr{ .data = .{ .e_boolean = .{ .value = value.toBoolean() } }, .loc = this.caller.loc };
                },
-                jsc.ConsoleObject.Formatter.Tag.Array => {
+                JSC.ConsoleObject.Formatter.Tag.Array => {
                    this.is_top_level = false;

                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
@@ -381,7 +381,7 @@ pub const Runner = struct {
                        return _entry.value_ptr.*;
                    }

-                    var iter = try jsc.JSArrayIterator.init(value, this.global);
+                    var iter = try JSC.JSArrayIterator.init(value, this.global);
                    if (iter.len == 0) {
                        const result = Expr.init(
                            E.Array,
@@ -418,7 +418,7 @@ pub const Runner = struct {
                    return out;
                },
                // TODO: optimize this
-                jsc.ConsoleObject.Formatter.Tag.Object => {
+                JSC.ConsoleObject.Formatter.Tag.Object => {
                    this.is_top_level = false;
                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
                    if (_entry.found_existing) {
@@ -433,7 +433,7 @@ pub const Runner = struct {
                    }
                    // SAFETY: tag ensures `value` is an object.
                    const obj = value.getObject() orelse unreachable;
-                    var object_iter = try jsc.JSPropertyIterator(.{
+                    var object_iter = try JSC.JSPropertyIterator(.{
                        .skip_empty_name = false,
                        .include_value = true,
                    }).init(this.global, obj);
@@ -466,7 +466,7 @@ pub const Runner = struct {
                    // if (console_tag.cell == .JSDate) {
                    //     // in the code for printing dates, it never exceeds this amount
                    //     var iso_string_buf = this.allocator.alloc(u8, 36) catch unreachable;
-                    //     var str = jsc.ZigString.init("");
+                    //     var str = JSC.ZigString.init("");
                    //     value.jsonStringify(this.global, 0, &str);
                    //     var out_buf: []const u8 = std.fmt.bufPrint(iso_string_buf, "{}", .{str}) catch "";
                    //     if (out_buf.len > 2) {
@@ -502,8 +502,8 @@ pub const Runner = struct {

                    this.macro.vm.waitForPromise(promise);

-                    const promise_result = promise.result(this.macro.vm.jsc_vm);
-                    const rejected = promise.status(this.macro.vm.jsc_vm) == .rejected;
+                    const promise_result = promise.result(this.macro.vm.jsc);
+                    const rejected = promise.status(this.macro.vm.jsc) == .rejected;

                    if (promise_result.isUndefined() and this.is_top_level) {
                        this.is_top_level = false;
@@ -542,12 +542,12 @@ pub const Runner = struct {
        caller: Expr,
        source: *const logger.Source,
        id: i32,
-        javascript_object: jsc.JSValue,
+        javascript_object: JSC.JSValue,
    ) MacroError!Expr {
        if (comptime Environment.isDebug) Output.prettyln("<r><d>[macro]<r> call <d><b>{s}<r>", .{function_name});

-        exception_holder = jsc.ZigException.Holder.init();
-        var js_args: []jsc.JSValue = &.{};
+        exception_holder = JSC.ZigException.Holder.init();
+        var js_args: []JSC.JSValue = &.{};
        var js_processed_args_len: usize = 0;
        defer {
            for (js_args[0..js_processed_args_len -| @as(usize, @intFromBool(javascript_object != .zero))]) |arg| {
@@ -557,12 +557,12 @@ pub const Runner = struct {
            allocator.free(js_args);
        }

-        const globalObject = jsc.VirtualMachine.get().global;
+        const globalObject = JSC.VirtualMachine.get().global;

        switch (caller.data) {
            .e_call => |call| {
                const call_args: []Expr = call.args.slice();
-                js_args = try allocator.alloc(jsc.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero)));
+                js_args = try allocator.alloc(JSC.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero)));
                js_processed_args_len = js_args.len;

                for (0.., call_args, js_args[0..call_args.len]) |i, in, *out| {
@@ -589,7 +589,7 @@ pub const Runner = struct {

        if (javascript_object != .zero) {
            if (js_args.len == 0) {
-                js_args = try allocator.alloc(jsc.JSValue, 1);
+                js_args = try allocator.alloc(JSC.JSValue, 1);
            }

            js_args[js_args.len - 1] = javascript_object;
@@ -601,9 +601,9 @@ pub const Runner = struct {
    threadlocal var call_args: CallArgs = undefined;
    threadlocal var result: MacroError!Expr = undefined;
    pub fn callWrapper(args: CallArgs) MacroError!Expr {
-        jsc.markBinding(@src());
+        JSC.markBinding(@src());
        call_args = args;
-        Bun__startMacro(&call, jsc.VirtualMachine.get().global);
+        Bun__startMacro(&call, JSC.VirtualMachine.get().global);
        return result;
    }

@@ -631,7 +631,7 @@ pub const Runner = struct {
    extern "c" fn Bun__startMacro(function: *const anyopaque, *anyopaque) void;
};

-const string = []const u8;
+// @sortImports

const DotEnv = @import("../env_loader.zig");
const std = @import("std");
@@ -650,12 +655,17 @@ const Output = bun.Output;
const Transpiler = bun.Transpiler;
const default_allocator = bun.default_allocator;
const logger = bun.logger;
+const string = bun.string;
const strings = bun.strings;
const Loader = bun.options.Loader;
const MimeType = bun.http.MimeType;
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;

-const js_ast = bun.ast;
+const JSC = bun.JSC;
+const JavaScript = bun.JSC;
+const js = bun.JSC.C;
+
+const js_ast = bun.js_ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
@@ -664,7 +669,3 @@ const Macro = js_ast.Macro;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const ToJSError = js_ast.ToJSError;
-
-const JavaScript = bun.jsc;
-const jsc = bun.jsc;
-const js = bun.jsc.C;
@@ -164,6 +164,8 @@ pub fn NewStore(comptime types: []const type, comptime count: usize) type {
    };
}

+// @sortImports
+
const std = @import("std");

const bun = @import("bun");
@@ -281,11 +281,13 @@ pub const Table = brk: {
    break :brk table;
};

-const string = []const u8;
+// @sortImports

-const bun = @import("bun");
-const std = @import("std");
+const std = @import("std");

-const js_ast = bun.ast;
+const bun = @import("bun");
+const string = bun.string;
+
+const js_ast = bun.js_ast;
const AssignTarget = js_ast.AssignTarget;
const Op = js_ast.Op;
@@ -212,12 +212,13 @@ pub const Continue = struct {
    label: ?LocRef = null,
};

-const string = []const u8;
+// @sortImports

const bun = @import("bun");
const logger = bun.logger;
+const string = bun.string;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const Case = js_ast.Case;
const Catch = js_ast.Catch;
const ClauseItem = js_ast.ClauseItem;
@@ -205,13 +205,15 @@ pub inline fn kindStopsHoisting(s: *const Scope) bool {
    return @intFromEnum(s.kind) >= @intFromEnum(Kind.entry);
}

+// @sortImports
+
const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const logger = bun.logger;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const Ref = js_ast.Ref;
const Scope = js_ast.Scope;
const StrictModeKind = js_ast.StrictModeKind;
@@ -112,10 +112,12 @@ pub const List = struct {
    };
};

+// @sortImports
+
const bun = @import("bun");
const std = @import("std");

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const Index = js_ast.Index;
const ServerComponentBoundary = js_ast.ServerComponentBoundary;
const UseDirective = js_ast.UseDirective;
@@ -406,15 +406,16 @@ pub fn caresAboutScope(self: *Stmt) bool {
    };
}

-const string = []const u8;
+// @sortImports

const std = @import("std");

const bun = @import("bun");
const Environment = bun.Environment;
const logger = bun.logger;
+const string = bun.string;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const Expr = js_ast.Expr;
const NewBatcher = js_ast.NewBatcher;
@@ -468,10 +468,7 @@ pub inline fn isHoisted(self: *const Symbol) bool {
    return Symbol.isKindHoisted(self.kind);
}

-pub const isKindFunction = Symbol.Kind.isFunction;
-pub const isKindHoisted = Symbol.Kind.isHoisted;
-pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction;
-pub const isKindPrivate = Symbol.Kind.isPrivate;
+// @sortImports

const std = @import("std");

@@ -479,9 +476,14 @@ const bun = @import("bun");
const BabyList = bun.BabyList;
const Output = bun.Output;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const DeclaredSymbol = js_ast.DeclaredSymbol;
const G = js_ast.G;
const ImportItemStatus = js_ast.ImportItemStatus;
const Ref = js_ast.Ref;
const Symbol = js_ast.Symbol;
+
+pub const isKindFunction = Symbol.Kind.isFunction;
+pub const isKindHoisted = Symbol.Kind.isHoisted;
+pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction;
+pub const isKindPrivate = Symbol.Kind.isPrivate;
@@ -126,14 +126,16 @@ pub const TSNamespaceMember = struct {
    };
};

-pub const Class = G.Class;
+// @sortImports

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const E = js_ast.E;
-const G = js_ast.G;
const Ref = js_ast.Ref;
+
+const G = js_ast.G;
+pub const Class = G.Class;
@@ -55,10 +55,12 @@ pub const UseDirective = enum(u2) {
    }
};

+// @sortImports
+
const std = @import("std");

const bun = @import("bun");
const strings = bun.strings;

-const js_ast = bun.ast;
+const js_ast = bun.js_ast;
const Flags = js_ast.Flags;
@@ -1,3 +1,11 @@
+const std = @import("std");
+const bun = @import("bun");
+
+const js_ast = bun.JSAst;
+
pub const NodeIndex = u32;
pub const NodeIndexNone = 4294967293;

// TODO: figure out if we actually need this

pub const RefHashCtx = struct {
@@ -149,7 +157,7 @@ pub const Ref = packed struct(u64) {
        } };
    }

-    fn dumpImpl(data: struct { ref: Ref, symbol: *ast.Symbol }, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+    fn dumpImpl(data: struct { ref: Ref, symbol: *js_ast.Symbol }, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
        try std.fmt.format(
            writer,
            "Ref[inner={d}, src={d}, .{s}; original_name={s}, uses={d}]",
@@ -214,23 +222,18 @@ pub const Ref = packed struct(u64) {
        return try writer.write([2]u32{ self.sourceIndex(), self.innerIndex() });
    }

-    pub fn getSymbol(ref: Ref, symbol_table: anytype) *ast.Symbol {
+    pub fn getSymbol(ref: Ref, symbol_table: anytype) *js_ast.Symbol {
        // Different parts of the bundler use different formats of the symbol table.
        // In the parser you only have one array, and .sourceIndex() is ignored.
        // In the bundler, you have a 2D array where both parts of the ref are used.
        const resolved_symbol_table = switch (@TypeOf(symbol_table)) {
-            *const std.ArrayList(ast.Symbol) => symbol_table.items,
-            *std.ArrayList(ast.Symbol) => symbol_table.items,
-            []ast.Symbol => symbol_table,
-            *ast.Symbol.Map => return symbol_table.get(ref) orelse
+            *const std.ArrayList(js_ast.Symbol) => symbol_table.items,
+            *std.ArrayList(js_ast.Symbol) => symbol_table.items,
+            []js_ast.Symbol => symbol_table,
+            *js_ast.Symbol.Map => return symbol_table.get(ref) orelse
                unreachable, // ref must exist within symbol table
            else => |T| @compileError("Unsupported type to Ref.getSymbol: " ++ @typeName(T)),
        };
        return &resolved_symbol_table[ref.innerIndex()];
    }
};

-const std = @import("std");
-
-const bun = @import("bun");
-const ast = bun.ast;
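
For illustration only (not part of this diff): how a parser-side caller might use getSymbol with the flat-list form described in the comment above. This assumes `Ref` and `Symbol` are reachable via `bun.ast`, as the removed imports suggest, and that `original_name` is the field shown in the dump format string; `originalName` is a hypothetical helper.

const std = @import("std");
const bun = @import("bun");
const ast = bun.ast;

// In the parser there is a single flat symbol array, so getSymbol
// ignores ref.sourceIndex() and indexes by ref.innerIndex() alone.
fn originalName(ref: ast.Ref, symbols: *std.ArrayList(ast.Symbol)) []const u8 {
    const symbol = ref.getSymbol(symbols);
    return symbol.original_name;
}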
Some files were not shown because too many files have changed in this diff.