mirror of https://github.com/oven-sh/bun
synced 2026-02-07 01:18:51 +00:00

Compare commits
6 Commits
ciro/case- ... add-bun-gi

| Author | SHA1 | Date |
|---|---|---|
| | a40e2b857e | |
| | 48a6082167 | |
| | 0b6d896adf | |
| | c9dc5dd381 | |
| | d33550ddba | |
| | f02511d2f8 | |
@@ -114,8 +114,7 @@ const buildPlatforms = [
  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.23" },
  { os: "windows", arch: "x64", release: "2019" },
  { os: "windows", arch: "x64", baseline: true, release: "2019" },
  // TODO: Re-enable when Windows ARM64 VS component installation is resolved on Buildkite runners
  // { os: "windows", arch: "aarch64", release: "2019" },
  { os: "windows", arch: "aarch64", release: "2019" },
];

/**
@@ -471,7 +470,7 @@ function getBuildCommand(target, options, label) {
 */
function getWindowsArm64CrossFlags(target) {
  if (target.os === "windows" && target.arch === "aarch64") {
    return " --toolchain windows-aarch64";
    return " --toolchain windows-aarch64 -DSKIP_CODEGEN=ON -DCMAKE_C_COMPILER=clang-cl -DCMAKE_CXX_COMPILER=clang-cl";
  }
  return "";
}
@@ -484,7 +483,6 @@ function getWindowsArm64CrossFlags(target) {
function getBuildCppStep(platform, options) {
  const command = getBuildCommand(platform, options);
  const crossFlags = getWindowsArm64CrossFlags(platform);

  return {
    key: `${getTargetKey(platform)}-build-cpp`,
    label: `${getTargetLabel(platform)} - build-cpp`,
@@ -46,6 +46,7 @@
    "src/io/*.cpp",
    "src/bun.js/modules/*.cpp",
    "src/bun.js/bindings/*.cpp",
    "src/bun.js/bindings/git/*.cpp",
    "src/bun.js/bindings/webcore/*.cpp",
    "src/bun.js/bindings/sqlite/*.cpp",
    "src/bun.js/bindings/webcrypto/*.cpp",
@@ -7,13 +7,6 @@ register_repository(
  4f4f5ef8ebc6e23cbf393428f0ab1b526773f7ac
)

set(BORINGSSL_CMAKE_ARGS -DBUILD_SHARED_LIBS=OFF)

# Disable ASM on Windows ARM64 to avoid mixing non-ARM object files into ARM64 libs
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
  list(APPEND BORINGSSL_CMAKE_ARGS -DOPENSSL_NO_ASM=1)
endif()

register_cmake_command(
  TARGET
    boringssl

@@ -22,7 +15,7 @@ register_cmake_command(
    ssl
    decrepit
  ARGS
    ${BORINGSSL_CMAKE_ARGS}
    -DBUILD_SHARED_LIBS=OFF
  INCLUDES
    include
)
@@ -54,6 +54,7 @@ set(BUN_DEPENDENCIES
  Cares
  Highway
  LibDeflate
  Libgit2
  LolHtml
  Lshpack
  Mimalloc

@@ -1322,7 +1323,7 @@ list(TRANSFORM BUN_DEPENDENCIES TOLOWER OUTPUT_VARIABLE BUN_TARGETS)
add_custom_target(dependencies DEPENDS ${BUN_TARGETS})

if(APPLE)
  target_link_libraries(${bun} PRIVATE icucore resolv)
  target_link_libraries(${bun} PRIVATE icucore resolv iconv)
  target_compile_definitions(${bun} PRIVATE U_DISABLE_RENAMING=1)
endif()
@@ -1457,8 +1458,6 @@ if(NOT BUN_CPP_ONLY)
  # ==856230==See https://github.com/google/sanitizers/issues/856 for possible workarounds.
  # the linked issue refers to very old kernels but this still happens to us on modern ones.
  # disabling ASLR to run the binary works around it
  # Skip post-build test/features when cross-compiling (can't run the target binary on the host)
  if(NOT CMAKE_CROSSCOMPILING)
  set(TEST_BUN_COMMAND_BASE ${BUILD_PATH}/${bunExe} --revision)
  set(TEST_BUN_COMMAND_ENV_WRAP
    ${CMAKE_COMMAND} -E env BUN_DEBUG_QUIET_LOGS=1)

@@ -1507,7 +1506,6 @@ if(NOT BUN_CPP_ONLY)
      ${BUILD_PATH}/features.json
    )
  endif()
  endif() # NOT CMAKE_CROSSCOMPILING

  if(CMAKE_HOST_APPLE AND bunStrip)
    register_command(

@@ -1554,10 +1552,7 @@ if(NOT BUN_CPP_ONLY)
    string(REPLACE bun ${bunTriplet} bunPath ${bun})
  endif()

  set(bunFiles ${bunExe})
  if(NOT CMAKE_CROSSCOMPILING)
    list(APPEND bunFiles features.json)
  endif()
  set(bunFiles ${bunExe} features.json)
  if(WIN32)
    list(APPEND bunFiles ${bun}.pdb)
  elseif(APPLE)
40 cmake/targets/BuildLibgit2.cmake Normal file
@@ -0,0 +1,40 @@
register_repository(
  NAME
    libgit2
  REPOSITORY
    libgit2/libgit2
  TAG
    v1.9.0
)

register_cmake_command(
  TARGET
    libgit2
  TARGETS
    libgit2package
  ARGS
    -DCMAKE_POSITION_INDEPENDENT_CODE=ON
    -DBUILD_SHARED_LIBS=OFF
    -DBUILD_TESTS=OFF
    -DBUILD_CLI=OFF
    -DBUILD_EXAMPLES=OFF
    -DBUILD_FUZZERS=OFF
    # Network disabled - local operations only
    -DUSE_HTTPS=OFF
    -DUSE_SSH=OFF
    # Use bundled dependencies to avoid symbol conflicts with Bun's libraries
    -DUSE_BUNDLED_ZLIB=ON
    -DUSE_HTTP_PARSER=builtin
    -DREGEX_BACKEND=builtin
    -DUSE_SHA1=CollisionDetection
    # Enable threading
    -DUSE_THREADS=ON
    # Disable authentication features (not needed for local operations)
    -DUSE_GSSAPI=OFF
  LIB_PATH
    .
  LIBRARIES
    git2
  INCLUDES
    include
)
@@ -26,12 +26,6 @@ if(RELEASE)
  list(APPEND LOLHTML_BUILD_ARGS --release)
endif()

# Cross-compilation: tell cargo to target ARM64
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
  list(APPEND LOLHTML_BUILD_ARGS --target aarch64-pc-windows-msvc)
  set(LOLHTML_LIBRARY ${LOLHTML_BUILD_PATH}/aarch64-pc-windows-msvc/${LOLHTML_BUILD_TYPE}/${CMAKE_STATIC_LIBRARY_PREFIX}lolhtml${CMAKE_STATIC_LIBRARY_SUFFIX})
endif()

# Windows requires unwind tables, apparently.
if (NOT WIN32)
  # The encoded escape sequences are intentional. They're how you delimit multiple arguments in a single environment variable.

@@ -57,18 +51,11 @@ if(WIN32)
  if(MSVC_VERSIONS)
    list(GET MSVC_VERSIONS -1 MSVC_LATEST) # Get the latest version
    if(CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64")
      # Use Hostx64/arm64 for cross-compilation from x64, fall back to native
      if(EXISTS "${MSVC_LATEST}/bin/Hostx64/arm64/link.exe")
        set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/Hostx64/arm64/link.exe")
      else()
        set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/HostARM64/arm64/link.exe")
      endif()
      set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/HostARM64/arm64/link.exe")
      set(CARGO_LINKER_VAR "CARGO_TARGET_AARCH64_PC_WINDOWS_MSVC_LINKER")
      set(MSVC_LIB_ARCH "arm64")
    else()
      set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/Hostx64/x64/link.exe")
      set(CARGO_LINKER_VAR "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER")
      set(MSVC_LIB_ARCH "x64")
    endif()
    if(EXISTS "${MSVC_LINK_PATH}")
      list(APPEND LOLHTML_ENV "${CARGO_LINKER_VAR}=${MSVC_LINK_PATH}")
@@ -3,35 +3,18 @@ set(CMAKE_SYSTEM_PROCESSOR aarch64)

set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_CROSSCOMPILING ON)

# The rest only applies when building on Windows (C++ and link steps).
# The Zig step runs on Linux and only needs CMAKE_SYSTEM_NAME/PROCESSOR above.
if(CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows")
  # Force ARM64 architecture ID - this is what CMake uses to determine /machine: flag
  set(MSVC_C_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
  set(MSVC_CXX_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")

  # Ensure clang/clang-cl targets Windows ARM64 (otherwise ARM64-specific flags like
  # -march=armv8-a are rejected as x86-only).
  set(CMAKE_C_COMPILER_TARGET aarch64-pc-windows-msvc CACHE STRING "" FORCE)
  set(CMAKE_CXX_COMPILER_TARGET aarch64-pc-windows-msvc CACHE STRING "" FORCE)
  # CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
  set(CMAKE_POLICY_DEFAULT_CMP0197 NEW CACHE INTERNAL "")

  # ARM64 has lock-free atomics (highway's FindAtomics check can't run ARM64 test binary on x64)
  set(ATOMICS_LOCK_FREE_INSTRUCTIONS TRUE CACHE BOOL "" FORCE)
  set(HAVE_CXX_ATOMICS_WITHOUT_LIB TRUE CACHE BOOL "" FORCE)
  set(HAVE_CXX_ATOMICS64_WITHOUT_LIB TRUE CACHE BOOL "" FORCE)
  # Clear any inherited static linker flags that might have wrong machine types
  set(CMAKE_STATIC_LINKER_FLAGS "" CACHE STRING "" FORCE)

# Force ARM64 architecture ID - this is what CMake uses to determine /machine: flag
set(MSVC_C_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
set(MSVC_CXX_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")

# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW CACHE INTERNAL "")

# Clear any inherited static linker flags that might have wrong machine types
set(CMAKE_STATIC_LINKER_FLAGS "" CACHE STRING "" FORCE)

  # Use wrapper script for llvm-lib that strips /machine:x64 flags
  # This works around CMake 4.1.0 bug where both ARM64 and x64 machine flags are added
  get_filename_component(_TOOLCHAIN_DIR "${CMAKE_CURRENT_LIST_DIR}" DIRECTORY)
  set(CMAKE_AR "${_TOOLCHAIN_DIR}/scripts/llvm-lib-wrapper.bat" CACHE FILEPATH "" FORCE)

endif()
# Use wrapper script for llvm-lib that strips /machine:x64 flags
# This works around CMake 4.1.0 bug where both ARM64 and x64 machine flags are added
get_filename_component(_TOOLCHAIN_DIR "${CMAKE_CURRENT_LIST_DIR}" DIRECTORY)
set(CMAKE_AR "${_TOOLCHAIN_DIR}/scripts/llvm-lib-wrapper.bat" CACHE FILEPATH "" FORCE)
@@ -50,11 +50,6 @@ if(APPLE)
  list(APPEND LLVM_PATHS ${HOMEBREW_PREFIX}/opt/llvm/bin)
endif()

if(WIN32)
  # Prefer standalone LLVM over VS-bundled (standalone supports cross-compilation)
  list(APPEND LLVM_PATHS "C:/Program Files/LLVM/bin")
endif()

if(UNIX)
  list(APPEND LLVM_PATHS /usr/lib/llvm/bin)
@@ -3,9 +3,9 @@ title: Markdown
description: Parse and render Markdown with Bun's built-in Markdown API, supporting GFM extensions and custom rendering callbacks
---

<Callout type="note">
**Unstable API** — This API is under active development and may change in future versions of Bun.
</Callout>
{% callout type="note" %}
**Unstable API** — This API is under active development and may change in future versions of Bun.
{% /callout %}

Bun includes a fast, built-in Markdown parser written in Zig. It supports GitHub Flavored Markdown (GFM) extensions and provides three APIs:
125 packages/bun-types/bun.d.ts vendored
@@ -610,129 +610,6 @@ declare module "bun" {
|
||||
*/
|
||||
function stripANSI(input: string): string;
|
||||
|
||||
/**
|
||||
* Converts a string to camelCase.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The camelCase version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.camelCase("foo bar") // "fooBar"
|
||||
* Bun.camelCase("XMLParser") // "xmlParser"
|
||||
* ```
|
||||
*/
|
||||
function camelCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to PascalCase.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The PascalCase version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.pascalCase("foo bar") // "FooBar"
|
||||
* ```
|
||||
*/
|
||||
function pascalCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to snake_case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The snake_case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.snakeCase("fooBar") // "foo_bar"
|
||||
* ```
|
||||
*/
|
||||
function snakeCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to kebab-case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The kebab-case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.kebabCase("fooBar") // "foo-bar"
|
||||
* ```
|
||||
*/
|
||||
function kebabCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to CONSTANT_CASE.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The CONSTANT_CASE version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.constantCase("fooBar") // "FOO_BAR"
|
||||
* ```
|
||||
*/
|
||||
function constantCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to dot.case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The dot.case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.dotCase("fooBar") // "foo.bar"
|
||||
* ```
|
||||
*/
|
||||
function dotCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to Capital Case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The Capital Case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.capitalCase("fooBar") // "Foo Bar"
|
||||
* ```
|
||||
*/
|
||||
function capitalCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to Train-Case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The Train-Case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.trainCase("fooBar") // "Foo-Bar"
|
||||
* ```
|
||||
*/
|
||||
function trainCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to path/case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The path/case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.pathCase("fooBar") // "foo/bar"
|
||||
* ```
|
||||
*/
|
||||
function pathCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to Sentence case.
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The Sentence case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.sentenceCase("fooBar") // "Foo bar"
|
||||
* ```
|
||||
*/
|
||||
function sentenceCase(input: string): string;
|
||||
/**
|
||||
* Converts a string to no case (lowercased words separated by spaces).
|
||||
*
|
||||
* @param input The string to convert.
|
||||
* @returns The no case version of the string.
|
||||
* @example
|
||||
* ```ts
|
||||
* Bun.noCase("fooBar") // "foo bar"
|
||||
* ```
|
||||
*/
|
||||
function noCase(input: string): string;
|
||||
|
||||
interface WrapAnsiOptions {
|
||||
/**
|
||||
* If `true`, break words in the middle if they don't fit on a line.
|
||||
@@ -2277,7 +2154,7 @@ declare module "bun" {
|
||||
interface Hash {
|
||||
wyhash: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
adler32: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
|
||||
crc32: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
crc32: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
|
||||
cityHash32: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
|
||||
cityHash64: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
xxHash32: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
|
||||
642 packages/bun-types/git.d.ts vendored Normal file
@@ -0,0 +1,642 @@
|
||||
/**
|
||||
* Fast Git operations for Bun.js powered by libgit2.
|
||||
*
|
||||
* This module provides read-only Git repository operations.
|
||||
* Network operations (HTTPS/SSH) are not supported - local operations only.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { Repository } from 'bun:git';
|
||||
*
|
||||
* const repo = Repository.open('.');
|
||||
* const head = repo.head();
|
||||
* console.log(`HEAD: ${head.id} - ${head.summary}`);
|
||||
* console.log(`Author: ${head.author.name} <${head.author.email}>`);
|
||||
* ```
|
||||
*
|
||||
* @module bun:git
|
||||
*/
|
||||
declare module "bun:git" {
|
||||
/**
|
||||
* Represents a Git signature (author or committer information).
|
||||
*/
|
||||
export interface Signature {
|
||||
/**
|
||||
* The name of the person.
|
||||
* @example "John Doe"
|
||||
*/
|
||||
readonly name: string;
|
||||
|
||||
/**
|
||||
* The email address of the person.
|
||||
* @example "john@example.com"
|
||||
*/
|
||||
readonly email: string;
|
||||
|
||||
/**
|
||||
* Unix timestamp of when the signature was created.
|
||||
* @example 1704067200
|
||||
*/
|
||||
readonly time: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Status flags for working directory entries.
|
||||
* These are bit flags that can be combined with bitwise OR.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { Status } from 'bun:git';
|
||||
*
|
||||
* const entries = repo.getStatus();
|
||||
* for (const entry of entries) {
|
||||
* if (entry.status & Status.WT_MODIFIED) {
|
||||
* console.log('Modified in workdir:', entry.path);
|
||||
* }
|
||||
* if (entry.status & Status.INDEX_NEW) {
|
||||
* console.log('New in index:', entry.path);
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export const Status: {
|
||||
/** Entry is current and unchanged */
|
||||
readonly CURRENT: 0;
|
||||
/** Entry is new in the index */
|
||||
readonly INDEX_NEW: 1;
|
||||
/** Entry is modified in the index */
|
||||
readonly INDEX_MODIFIED: 2;
|
||||
/** Entry is deleted in the index */
|
||||
readonly INDEX_DELETED: 4;
|
||||
/** Entry is renamed in the index */
|
||||
readonly INDEX_RENAMED: 8;
|
||||
/** Entry type changed in the index */
|
||||
readonly INDEX_TYPECHANGE: 16;
|
||||
/** Entry is new in the working tree */
|
||||
readonly WT_NEW: 128;
|
||||
/** Entry is modified in the working tree */
|
||||
readonly WT_MODIFIED: 256;
|
||||
/** Entry is deleted in the working tree */
|
||||
readonly WT_DELETED: 512;
|
||||
/** Entry type changed in the working tree */
|
||||
readonly WT_TYPECHANGE: 1024;
|
||||
/** Entry is renamed in the working tree */
|
||||
readonly WT_RENAMED: 2048;
|
||||
/** Entry is ignored */
|
||||
readonly IGNORED: 16384;
|
||||
/** Entry is conflicted */
|
||||
readonly CONFLICTED: 32768;
|
||||
};
|
||||
|
||||
/**
|
||||
* Delta types for diff entries.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { DeltaType } from 'bun:git';
|
||||
*
|
||||
* const diff = repo.diff();
|
||||
* for (const file of diff.files) {
|
||||
* if (file.status === DeltaType.ADDED) {
|
||||
* console.log('Added:', file.newPath);
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export const DeltaType: {
|
||||
/** No changes */
|
||||
readonly UNMODIFIED: 0;
|
||||
/** Entry does not exist in old version */
|
||||
readonly ADDED: 1;
|
||||
/** Entry does not exist in new version */
|
||||
readonly DELETED: 2;
|
||||
/** Entry content changed between old and new */
|
||||
readonly MODIFIED: 3;
|
||||
/** Entry was renamed between old and new */
|
||||
readonly RENAMED: 4;
|
||||
/** Entry was copied from another old entry */
|
||||
readonly COPIED: 5;
|
||||
/** Entry is ignored item in workdir */
|
||||
readonly IGNORED: 6;
|
||||
/** Entry is untracked item in workdir */
|
||||
readonly UNTRACKED: 7;
|
||||
/** Entry type changed between old and new */
|
||||
readonly TYPECHANGE: 8;
|
||||
/** Entry is unreadable */
|
||||
readonly CONFLICTED: 10;
|
||||
};
|
||||
|
||||
/**
|
||||
* Options for getting repository status.
|
||||
*/
|
||||
export interface StatusOptions {
|
||||
/**
|
||||
* Include untracked files in the status.
|
||||
* @default true
|
||||
*/
|
||||
includeUntracked?: boolean;
|
||||
|
||||
/**
|
||||
* Include ignored files in the status.
|
||||
* @default false
|
||||
*/
|
||||
includeIgnored?: boolean;
|
||||
|
||||
/**
|
||||
* Recurse into untracked directories.
|
||||
* @default true
|
||||
*/
|
||||
recurseUntrackedDirs?: boolean;
|
||||
|
||||
/**
|
||||
* Detect renamed files.
|
||||
* @default false
|
||||
*/
|
||||
detectRenames?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a status entry for a file in the working directory.
|
||||
*/
|
||||
export class StatusEntry {
|
||||
/**
|
||||
* The path of the file relative to the repository root.
|
||||
*/
|
||||
readonly path: string;
|
||||
|
||||
/**
|
||||
* Status flags (combination of Status values).
|
||||
*/
|
||||
readonly status: number;
|
||||
|
||||
/**
|
||||
* Check if the entry is new (untracked or staged as new).
|
||||
*/
|
||||
isNew(): boolean;
|
||||
|
||||
/**
|
||||
* Check if the entry is modified.
|
||||
*/
|
||||
isModified(): boolean;
|
||||
|
||||
/**
|
||||
* Check if the entry is deleted.
|
||||
*/
|
||||
isDeleted(): boolean;
|
||||
|
||||
/**
|
||||
* Check if the entry is renamed.
|
||||
*/
|
||||
isRenamed(): boolean;
|
||||
|
||||
/**
|
||||
* Check if the entry is ignored.
|
||||
*/
|
||||
isIgnored(): boolean;
|
||||
|
||||
/**
|
||||
* Check if the entry has changes staged in the index.
|
||||
*/
|
||||
inIndex(): boolean;
|
||||
|
||||
/**
|
||||
* Check if the entry has changes in the working tree.
|
||||
*/
|
||||
inWorkingTree(): boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents an entry in the Git index.
|
||||
*/
|
||||
export interface IndexEntry {
|
||||
/**
|
||||
* The path of the file relative to the repository root.
|
||||
*/
|
||||
readonly path: string;
|
||||
|
||||
/**
|
||||
* The file mode (e.g., 0o100644 for regular files).
|
||||
*/
|
||||
readonly mode: number;
|
||||
|
||||
/**
|
||||
* The blob OID (SHA-1 hash) of the file content.
|
||||
*/
|
||||
readonly oid: string;
|
||||
|
||||
/**
|
||||
* The stage number (0 for normal, 1-3 for conflict stages).
|
||||
*/
|
||||
readonly stage: number;
|
||||
|
||||
/**
|
||||
* The file size in bytes.
|
||||
*/
|
||||
readonly size: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for getting diff information.
|
||||
*/
|
||||
export interface DiffOptions {
|
||||
/**
|
||||
* If true, compare HEAD to index (staged changes).
|
||||
* If false, compare HEAD to working directory.
|
||||
* @default false
|
||||
*/
|
||||
cached?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a changed file in a diff.
|
||||
*/
|
||||
export interface DiffFile {
|
||||
/**
|
||||
* The type of change (see DeltaType).
|
||||
*/
|
||||
readonly status: number;
|
||||
|
||||
/**
|
||||
* The old path (null for added files).
|
||||
*/
|
||||
readonly oldPath: string | null;
|
||||
|
||||
/**
|
||||
* The new path.
|
||||
*/
|
||||
readonly newPath: string;
|
||||
|
||||
/**
|
||||
* Similarity percentage for renamed/copied files (0-100).
|
||||
*/
|
||||
readonly similarity?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of a diff operation.
|
||||
*/
|
||||
export interface DiffResult {
|
||||
/**
|
||||
* List of changed files.
|
||||
*/
|
||||
readonly files: DiffFile[];
|
||||
|
||||
/**
|
||||
* Statistics about the diff.
|
||||
*/
|
||||
readonly stats: {
|
||||
/** Number of files changed */
|
||||
readonly filesChanged: number;
|
||||
/** Total lines inserted */
|
||||
readonly insertions: number;
|
||||
/** Total lines deleted */
|
||||
readonly deletions: number;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for getting commit history.
|
||||
*/
|
||||
export interface LogOptions {
|
||||
/**
|
||||
* Starting point for history traversal.
|
||||
* @default "HEAD"
|
||||
*/
|
||||
from?: string;
|
||||
|
||||
/**
|
||||
* Range specification (e.g., "origin/main..HEAD").
|
||||
* If provided, `from` is ignored.
|
||||
*/
|
||||
range?: string;
|
||||
|
||||
/**
|
||||
* Maximum number of commits to return.
|
||||
* @default unlimited
|
||||
*/
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a Git commit object.
|
||||
*
|
||||
* A commit contains information about a snapshot of the repository,
|
||||
* including the author, committer, message, and parent commits.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const head = repo.head();
|
||||
* console.log(head.id); // "abc123..."
|
||||
* console.log(head.message); // "feat: add new feature\n\nDetailed description..."
|
||||
* console.log(head.summary); // "feat: add new feature"
|
||||
* ```
|
||||
*/
|
||||
export class Commit {
|
||||
/**
|
||||
* The full 40-character hexadecimal SHA-1 hash of the commit.
|
||||
* @example "a1b2c3d4e5f6..."
|
||||
*/
|
||||
readonly id: string;
|
||||
|
||||
/**
|
||||
* The full commit message, including the body.
|
||||
* @example "feat: add new feature\n\nThis commit adds..."
|
||||
*/
|
||||
readonly message: string;
|
||||
|
||||
/**
|
||||
* The first line of the commit message (the summary/title).
|
||||
* Does not include any trailing newline.
|
||||
* @example "feat: add new feature"
|
||||
*/
|
||||
readonly summary: string;
|
||||
|
||||
/**
|
||||
* The author of the commit (who wrote the changes).
|
||||
*/
|
||||
readonly author: Signature;
|
||||
|
||||
/**
|
||||
* The committer of the commit (who committed the changes).
|
||||
* This may differ from the author in cases like cherry-picks or rebases.
|
||||
*/
|
||||
readonly committer: Signature;
|
||||
|
||||
/**
|
||||
* Unix timestamp of when the commit was created.
|
||||
* This is the committer's timestamp.
|
||||
* @example 1704067200
|
||||
*/
|
||||
readonly time: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a Git repository.
|
||||
*
|
||||
* Use {@link Repository.open} to open an existing repository.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { Repository } from 'bun:git';
|
||||
*
|
||||
* // Open the repository at the current directory
|
||||
* const repo = Repository.open('.');
|
||||
*
|
||||
* // Get repository info
|
||||
* console.log('Path:', repo.path); // "/path/to/repo/.git/"
|
||||
* console.log('Workdir:', repo.workdir); // "/path/to/repo/"
|
||||
* console.log('Is bare:', repo.isBare); // false
|
||||
*
|
||||
* // Get the HEAD commit
|
||||
* const head = repo.head();
|
||||
* console.log('HEAD:', head.id.slice(0, 7), head.summary);
|
||||
* ```
|
||||
*/
|
||||
export class Repository {
|
||||
/**
|
||||
* Opens an existing Git repository.
|
||||
*
|
||||
* The path can point to either a working directory or a bare repository.
|
||||
* If the path points to a working directory, the `.git` directory will be located automatically.
|
||||
*
|
||||
* @param path Path to the repository (working directory or .git directory)
|
||||
* @returns A Repository instance
|
||||
* @throws Error if the path is not a valid Git repository
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Open by working directory
|
||||
* const repo = Repository.open('/path/to/project');
|
||||
*
|
||||
* // Open by .git directory
|
||||
* const repo2 = Repository.open('/path/to/project/.git');
|
||||
*
|
||||
* // Open current directory
|
||||
* const repo3 = Repository.open('.');
|
||||
* ```
|
||||
*/
|
||||
static open(path: string): Repository;
|
||||
|
||||
/**
|
||||
* Gets the commit that HEAD currently points to.
|
||||
*
|
||||
* @returns The commit that HEAD references
|
||||
* @throws Error if HEAD is unborn (new repository with no commits)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const head = repo.head();
|
||||
* console.log(`Current commit: ${head.summary}`);
|
||||
* console.log(`Author: ${head.author.name}`);
|
||||
* ```
|
||||
*/
|
||||
head(): Commit;
|
||||
|
||||
/**
|
||||
* The path to the `.git` directory.
|
||||
* Always ends with a trailing slash.
|
||||
*
|
||||
* @example "/Users/me/project/.git/"
|
||||
*/
|
||||
readonly path: string;
|
||||
|
||||
/**
|
||||
* The path to the working directory.
|
||||
* Returns `null` for bare repositories.
|
||||
* When present, always ends with a trailing slash.
|
||||
*
|
||||
* @example "/Users/me/project/"
|
||||
*/
|
||||
readonly workdir: string | null;
|
||||
|
||||
/**
|
||||
* Whether this is a bare repository.
|
||||
* Bare repositories have no working directory.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* if (repo.isBare) {
|
||||
* console.log('This is a bare repository');
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
readonly isBare: boolean;
|
||||
|
||||
/**
|
||||
* Gets the working directory status.
|
||||
*
|
||||
* Returns an array of status entries for all changed files in the
|
||||
* working directory and index.
|
||||
*
|
||||
* @param options Options to control which files are included
|
||||
* @returns Array of status entries
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { Repository, Status } from 'bun:git';
|
||||
*
|
||||
* const repo = Repository.open('.');
|
||||
* const status = repo.getStatus();
|
||||
*
|
||||
* for (const entry of status) {
|
||||
* if (entry.isModified()) {
|
||||
* console.log('Modified:', entry.path);
|
||||
* }
|
||||
* if (entry.isNew()) {
|
||||
* console.log('New:', entry.path);
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
getStatus(options?: StatusOptions): StatusEntry[];
|
||||
|
||||
/**
|
||||
* Resolves a revision specification to a commit OID.
|
||||
*
|
||||
* Supports standard Git revision syntax including:
|
||||
* - Branch names: "main", "feature/foo"
|
||||
* - Tag names: "v1.0.0"
|
||||
* - SHA prefixes: "abc123"
|
||||
* - Special refs: "HEAD", "HEAD~1", "HEAD^2"
|
||||
* - Upstream: "@{u}", "main@{u}"
|
||||
*
|
||||
* @param spec The revision specification to resolve
|
||||
* @returns The 40-character hex OID
|
||||
* @throws Error if the spec cannot be resolved
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const headOid = repo.revParse('HEAD');
|
||||
* const parentOid = repo.revParse('HEAD~1');
|
||||
* const branchOid = repo.revParse('main');
|
||||
* ```
|
||||
*/
|
||||
revParse(spec: string): string;
|
||||
|
||||
/**
|
||||
* Gets the name of the current branch.
|
||||
*
|
||||
* @returns The branch name, or null if HEAD is detached or unborn
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const branch = repo.getCurrentBranch();
|
||||
* if (branch) {
|
||||
* console.log('On branch:', branch);
|
||||
* } else {
|
||||
* console.log('HEAD is detached');
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
getCurrentBranch(): string | null;
|
||||
|
||||
/**
|
||||
* Gets the ahead/behind counts between two commits.
|
||||
*
|
||||
* This is useful for comparing a local branch to its upstream.
|
||||
*
|
||||
* @param local The local ref (default: "HEAD")
|
||||
* @param upstream The upstream ref (default: "@{u}")
|
||||
* @returns Object with ahead and behind counts
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const { ahead, behind } = repo.aheadBehind();
|
||||
* console.log(`${ahead} ahead, ${behind} behind`);
|
||||
*
|
||||
* // Compare specific refs
|
||||
* const { ahead, behind } = repo.aheadBehind('feature', 'origin/main');
|
||||
* ```
|
||||
*/
|
||||
aheadBehind(local?: string, upstream?: string): { ahead: number; behind: number };
|
||||
|
||||
/**
|
||||
* Gets the list of files tracked in the index.
|
||||
*
|
||||
* @returns Array of index entries
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const files = repo.listFiles();
|
||||
* console.log(`Tracking ${files.length} files`);
|
||||
*
|
||||
* for (const file of files) {
|
||||
* console.log(`${file.path} (mode: ${file.mode.toString(8)})`);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
listFiles(): IndexEntry[];
|
||||
|
||||
/**
|
||||
* Gets diff information between HEAD and working directory or index.
|
||||
*
|
||||
* @param options Options to control the diff behavior
|
||||
* @returns Diff result with file list and statistics
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { Repository, DeltaType } from 'bun:git';
|
||||
*
|
||||
* const repo = Repository.open('.');
|
||||
*
|
||||
* // Unstaged changes (HEAD vs workdir)
|
||||
* const diff = repo.diff();
|
||||
* console.log(`${diff.stats.filesChanged} files changed`);
|
||||
* console.log(`+${diff.stats.insertions} -${diff.stats.deletions}`);
|
||||
*
|
||||
* // Staged changes (HEAD vs index)
|
||||
* const staged = repo.diff({ cached: true });
|
||||
*
|
||||
* for (const file of diff.files) {
|
||||
* if (file.status === DeltaType.MODIFIED) {
|
||||
* console.log('Modified:', file.newPath);
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
diff(options?: DiffOptions): DiffResult;
|
||||
|
||||
/**
|
||||
* Counts the number of commits in a range.
|
||||
*
|
||||
* @param range Optional range specification (e.g., "origin/main..HEAD")
|
||||
* @returns Number of commits
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Total commits
|
||||
* const total = repo.countCommits();
|
||||
*
|
||||
* // Commits since origin/main
|
||||
* const since = repo.countCommits('origin/main..HEAD');
|
||||
* ```
|
||||
*/
|
||||
countCommits(range?: string): number;
|
||||
|
||||
/**
|
||||
* Gets the commit history.
|
||||
*
|
||||
* @param options Options to control the log behavior
|
||||
* @returns Array of commits
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Last 10 commits
|
||||
* const commits = repo.log({ limit: 10 });
|
||||
*
|
||||
* for (const commit of commits) {
|
||||
* console.log(`${commit.id.slice(0, 7)} ${commit.summary}`);
|
||||
* }
|
||||
*
|
||||
* // Commits in a range
|
||||
* const range = repo.log({ range: 'origin/main..HEAD' });
|
||||
*
|
||||
* // Commits from a specific ref
|
||||
* const fromTag = repo.log({ from: 'v1.0.0', limit: 5 });
|
||||
* ```
|
||||
*/
|
||||
log(options?: LogOptions): Commit[];
|
||||
}
|
||||
|
||||
export default Repository;
|
||||
}
|
||||
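For orientation, here is a minimal usage sketch that exercises only the surface declared above (`Repository`, `Status`, and the read-only query methods). The repository path and logged strings are assumptions; the API names are taken verbatim from the typings in this diff:

```ts
// Sketch based solely on the declarations in packages/bun-types/git.d.ts above.
// The path "." and the printed output are illustrative assumptions.
import { Repository, Status } from "bun:git";

const repo = Repository.open(".");

// HEAD commit and current branch
const head = repo.head();
const branch = repo.getCurrentBranch() ?? "detached";
console.log(`${branch} @ ${head.id.slice(0, 7)} ${head.summary}`);

// Working-tree status via the Status bit flags
for (const entry of repo.getStatus({ includeUntracked: true })) {
  if (entry.status & Status.WT_MODIFIED) console.log("modified:", entry.path);
  if (entry.isNew()) console.log("new:", entry.path);
}

// Unstaged diff statistics and recent history
const { stats } = repo.diff();
console.log(`${stats.filesChanged} files, +${stats.insertions} -${stats.deletions}`);
for (const commit of repo.log({ limit: 5 })) {
  console.log(commit.id.slice(0, 7), commit.summary);
}
```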
1 packages/bun-types/index.d.ts vendored
@@ -14,6 +14,7 @@
/// <reference path="./html-rewriter.d.ts" />
/// <reference path="./jsc.d.ts" />
/// <reference path="./sqlite.d.ts" />
/// <reference path="./git.d.ts" />
/// <reference path="./test.d.ts" />
/// <reference path="./wasm.d.ts" />
/// <reference path="./overrides.d.ts" />
@@ -566,10 +566,8 @@ namespace uWS
|
||||
|
||||
|
||||
bool isHTTPMethod = (__builtin_expect(data[1] == '/', 1));
|
||||
bool isConnect = !isHTTPMethod && ((data - start) == 7 && memcmp(start, "CONNECT", 7) == 0);
|
||||
/* Also accept proxy-style absolute URLs (http://... or https://...) as valid request targets */
|
||||
bool isProxyStyleURL = !isHTTPMethod && !isConnect && data[0] == 32 && isHTTPorHTTPSPrefixForProxies(data + 1, end) == 1;
|
||||
if (isHTTPMethod || isConnect || isProxyStyleURL) [[likely]] {
|
||||
bool isConnect = !isHTTPMethod && (isHTTPorHTTPSPrefixForProxies(data + 1, end) == 1 || ((data - start) == 7 && memcmp(start, "CONNECT", 7) == 0));
|
||||
if (isHTTPMethod || isConnect) [[likely]] {
|
||||
header.key = {start, (size_t) (data - start)};
|
||||
data++;
|
||||
if(!isValidMethod(header.key, useStrictMethodValidation)) {
|
||||
|
||||
@@ -57,11 +57,7 @@ async function build(args) {
|
||||
if (process.platform === "win32" && !process.env["VSINSTALLDIR"]) {
|
||||
const shellPath = join(import.meta.dirname, "vs-shell.ps1");
|
||||
const scriptPath = import.meta.filename;
|
||||
// When cross-compiling to ARM64, tell vs-shell.ps1 to set up the x64_arm64 VS environment
|
||||
const toolchainIdx = args.indexOf("--toolchain");
|
||||
const requestedVsArch = toolchainIdx !== -1 && args[toolchainIdx + 1] === "windows-aarch64" ? "arm64" : undefined;
|
||||
const env = requestedVsArch ? { ...process.env, BUN_VS_ARCH: requestedVsArch } : undefined;
|
||||
return spawn("pwsh", ["-NoProfile", "-NoLogo", "-File", shellPath, process.argv0, scriptPath, ...args], { env });
|
||||
return spawn("pwsh", ["-NoProfile", "-NoLogo", "-File", shellPath, process.argv0, scriptPath, ...args]);
|
||||
}
|
||||
|
||||
if (isCI) {
|
||||
@@ -96,9 +92,21 @@ async function build(args) {
|
||||
generateOptions["--toolchain"] = toolchainPath;
|
||||
}
|
||||
|
||||
// Windows ARM64: log detection (compiler is selected by CMake/toolchain)
|
||||
// Windows ARM64: automatically set required options
|
||||
if (isWindowsARM64) {
|
||||
console.log("Windows ARM64 detected");
|
||||
// Use clang-cl instead of MSVC cl.exe for proper ARM64 flag support
|
||||
if (!generateOptions["-DCMAKE_C_COMPILER"]) {
|
||||
generateOptions["-DCMAKE_C_COMPILER"] = "clang-cl";
|
||||
}
|
||||
if (!generateOptions["-DCMAKE_CXX_COMPILER"]) {
|
||||
generateOptions["-DCMAKE_CXX_COMPILER"] = "clang-cl";
|
||||
}
|
||||
// Skip codegen by default since x64 bun crashes under WoW64 emulation
|
||||
// Can be overridden with -DSKIP_CODEGEN=OFF once ARM64 bun is available
|
||||
if (!generateOptions["-DSKIP_CODEGEN"]) {
|
||||
generateOptions["-DSKIP_CODEGEN"] = "ON";
|
||||
}
|
||||
console.log("Windows ARM64 detected: using clang-cl and SKIP_CODEGEN=ON");
|
||||
}
|
||||
|
||||
const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
|
||||
|
||||
@@ -5,22 +5,7 @@ $ErrorActionPreference = "Stop"
|
||||
|
||||
# Detect system architecture
|
||||
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
|
||||
|
||||
# Allow overriding the target arch (useful for cross-compiling on x64 -> ARM64)
|
||||
$script:VsArch = $null
|
||||
if ($env:BUN_VS_ARCH) {
|
||||
switch ($env:BUN_VS_ARCH.ToLowerInvariant()) {
|
||||
"arm64" { $script:VsArch = "arm64" }
|
||||
"aarch64" { $script:VsArch = "arm64" }
|
||||
"amd64" { $script:VsArch = "amd64" }
|
||||
"x64" { $script:VsArch = "amd64" }
|
||||
default { throw "Invalid BUN_VS_ARCH: $env:BUN_VS_ARCH (expected arm64|amd64)" }
|
||||
}
|
||||
}
|
||||
|
||||
if (-not $script:VsArch) {
|
||||
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
|
||||
}
|
||||
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
|
||||
|
||||
if($env:VSINSTALLDIR -eq $null) {
|
||||
Write-Host "Loading Visual Studio environment, this may take a second..."
|
||||
@@ -32,29 +17,17 @@ if($env:VSINSTALLDIR -eq $null) {
|
||||
|
||||
$vsDir = (& $vswhere -prerelease -latest -property installationPath)
|
||||
if ($vsDir -eq $null) {
|
||||
# Check common VS installation paths
|
||||
$searchPaths = @(
|
||||
"C:\Program Files\Microsoft Visual Studio\2022",
|
||||
"C:\Program Files (x86)\Microsoft Visual Studio\2022"
|
||||
)
|
||||
foreach ($searchPath in $searchPaths) {
|
||||
if (Test-Path $searchPath) {
|
||||
$vsDir = (Get-ChildItem -Path $searchPath -Directory | Select-Object -First 1).FullName
|
||||
if ($vsDir -ne $null) { break }
|
||||
}
|
||||
}
|
||||
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory
|
||||
if ($vsDir -eq $null) {
|
||||
throw "Visual Studio directory not found."
|
||||
}
|
||||
$vsDir = $vsDir.FullName
|
||||
}
|
||||
|
||||
Push-Location $vsDir
|
||||
try {
|
||||
$vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
|
||||
# Visual Studio's Launch-VsDevShell.ps1 only supports x86/amd64 for HostArch
|
||||
# For ARM64 builds, use amd64 as HostArch since it can cross-compile to ARM64
|
||||
$hostArch = if ($script:VsArch -eq "arm64") { "amd64" } else { $script:VsArch }
|
||||
. $vsShell -Arch $script:VsArch -HostArch $hostArch
|
||||
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
||||
@@ -88,7 +61,7 @@ if ($args.Count -gt 0) {
|
||||
$displayArgs += $arg
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Write-Host "$ $command $displayArgs"
|
||||
& $command $commandArgs
|
||||
exit $LASTEXITCODE
|
||||
|
||||
@@ -10,6 +10,7 @@ pub const HardcodedModule = enum {
    @"bun:test",
    @"bun:wrap",
    @"bun:sqlite",
    @"bun:git",
    @"node:assert",
    @"node:assert/strict",
    @"node:async_hooks",

@@ -98,6 +99,7 @@ pub const HardcodedModule = enum {
    .{ "bun:main", .@"bun:main" },
    .{ "bun:test", .@"bun:test" },
    .{ "bun:sqlite", .@"bun:sqlite" },
    .{ "bun:git", .@"bun:git" },
    .{ "bun:wrap", .@"bun:wrap" },
    .{ "bun:internal-for-testing", .@"bun:internal-for-testing" },
    // Node.js

@@ -366,6 +368,7 @@ pub const HardcodedModule = enum {
    .{ "bun:ffi", .{ .path = "bun:ffi" } },
    .{ "bun:jsc", .{ .path = "bun:jsc" } },
    .{ "bun:sqlite", .{ .path = "bun:sqlite" } },
    .{ "bun:git", .{ .path = "bun:git" } },
    .{ "bun:wrap", .{ .path = "bun:wrap" } },
    .{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } },
    .{ "ffi", .{ .path = "bun:ffi" } },
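Because `bun:git` is added to the hardcoded-module tables above, the specifier is resolved inside Bun's loader rather than through node_modules, and static and dynamic imports reach the same built-in. A short sketch, assuming (per git.d.ts) that the default export is the `Repository` class:

```ts
// "bun:git" resolves through the HardcodedModule tables shown above; nothing
// is looked up on disk. Per git.d.ts, the default export is Repository.
import GitRepository from "bun:git";

const repo = GitRepository.open(".");
console.log(repo.isBare, repo.workdir);

// Dynamic import hits the same hardcoded mapping.
const { Repository } = await import("bun:git");
console.log(Repository === GitRepository); // expected: true
```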
@@ -256,7 +256,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
jsc.markBinding(@src());
|
||||
if (this.socket.isDetached()) return;
|
||||
const handlers = this.getHandlers();
|
||||
log("onTimeout {s}", .{if (handlers.mode == .server) "S" else "C"});
|
||||
log("onTimeout {s}", .{if (handlers.is_server) "S" else "C"});
|
||||
const callback = handlers.onTimeout;
|
||||
if (callback == .zero or this.flags.finalizing) return;
|
||||
if (handlers.vm.isShuttingDown()) {
|
||||
@@ -281,7 +281,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
|
||||
pub fn handleConnectError(this: *This, errno: c_int) bun.JSError!void {
|
||||
const handlers = this.getHandlers();
|
||||
log("onConnectError {s} ({d}, {d})", .{ if (handlers.mode == .server) "S" else "C", errno, this.ref_count.get() });
|
||||
log("onConnectError {s} ({d}, {d})", .{ if (handlers.is_server) "S" else "C", errno, this.ref_count.get() });
|
||||
// Ensure the socket is still alive for any defer's we have
|
||||
this.ref();
|
||||
defer this.deref();
|
||||
@@ -397,8 +397,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
}
|
||||
|
||||
pub fn isServer(this: *const This) bool {
|
||||
const handlers = this.getHandlers();
|
||||
return handlers.mode.isServer();
|
||||
return this.getHandlers().is_server;
|
||||
}
|
||||
|
||||
pub fn onOpen(this: *This, socket: Socket) void {
|
||||
@@ -503,7 +502,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
jsc.markBinding(@src());
|
||||
if (this.socket.isDetached()) return;
|
||||
const handlers = this.getHandlers();
|
||||
log("onEnd {s}", .{if (handlers.mode == .server) "S" else "C"});
|
||||
log("onEnd {s}", .{if (handlers.is_server) "S" else "C"});
|
||||
// Ensure the socket remains alive until this is finished
|
||||
this.ref();
|
||||
defer this.deref();
|
||||
@@ -535,7 +534,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
this.socket = s;
|
||||
if (this.socket.isDetached()) return;
|
||||
const handlers = this.getHandlers();
|
||||
log("onHandshake {s} ({d})", .{ if (handlers.mode == .server) "S" else "C", success });
|
||||
log("onHandshake {s} ({d})", .{ if (handlers.is_server) "S" else "C", success });
|
||||
|
||||
const authorized = if (success == 1) true else false;
|
||||
|
||||
@@ -572,7 +571,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
result = callback.call(globalObject, this_value, &[_]JSValue{this_value}) catch |err| globalObject.takeException(err);
|
||||
|
||||
// only call onOpen once for clients
|
||||
if (handlers.mode != .server) {
|
||||
if (!handlers.is_server) {
|
||||
// clean onOpen callback so only called in the first handshake and not in every renegotiation
|
||||
// on servers this would require a different approach but it's not needed because our servers will not call handshake multiple times
|
||||
// servers don't support renegotiation
|
||||
@@ -601,7 +600,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
pub fn onClose(this: *This, _: Socket, err: c_int, _: ?*anyopaque) bun.JSError!void {
|
||||
jsc.markBinding(@src());
|
||||
const handlers = this.getHandlers();
|
||||
log("onClose {s}", .{if (handlers.mode == .server) "S" else "C"});
|
||||
log("onClose {s}", .{if (handlers.is_server) "S" else "C"});
|
||||
this.detachNativeCallback();
|
||||
this.socket.detach();
|
||||
defer this.deref();
|
||||
@@ -649,7 +648,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
this.socket = s;
|
||||
if (this.socket.isDetached()) return;
|
||||
const handlers = this.getHandlers();
|
||||
log("onData {s} ({d})", .{ if (handlers.mode == .server) "S" else "C", data.len });
|
||||
log("onData {s} ({d})", .{ if (handlers.is_server) "S" else "C", data.len });
|
||||
if (this.native_callback.onData(data)) return;
|
||||
|
||||
const callback = handlers.onData;
|
||||
@@ -692,7 +691,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
pub fn getListener(this: *This, _: *jsc.JSGlobalObject) JSValue {
|
||||
const handlers = this.handlers orelse return .js_undefined;
|
||||
|
||||
if (handlers.mode != .server or this.socket.isDetached()) {
|
||||
if (!handlers.is_server or this.socket.isDetached()) {
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
@@ -1353,7 +1352,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
};
|
||||
|
||||
const this_handlers = this.getHandlers();
|
||||
const handlers = try Handlers.fromJS(globalObject, socket_obj, this_handlers.mode == .server);
|
||||
const handlers = try Handlers.fromJS(globalObject, socket_obj, this_handlers.is_server);
|
||||
this_handlers.deinit();
|
||||
this_handlers.* = handlers;
|
||||
|
||||
@@ -1381,9 +1380,6 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
if (this.socket.isDetached() or this.socket.isNamedPipe()) {
|
||||
return .js_undefined;
|
||||
}
|
||||
if (this.isServer()) {
|
||||
return globalObject.throw("Server-side upgradeTLS is not supported. Use upgradeDuplexToTLS with isServer: true instead.", .{});
|
||||
}
|
||||
const args = callframe.arguments_old(1);
|
||||
|
||||
if (args.len < 1) {
|
||||
@@ -1575,7 +1571,7 @@ pub fn NewSocket(comptime ssl: bool) type {
|
||||
this.socket.detach();
|
||||
|
||||
// start TLS handshake after we set extension on the socket
|
||||
new_socket.startTLS(handlers_ptr.mode != .server);
|
||||
new_socket.startTLS(!handlers_ptr.is_server);
|
||||
|
||||
success = true;
|
||||
return array;
|
||||
@@ -1758,23 +1754,6 @@ pub fn NewWrappedHandler(comptime tls: bool) type {
|
||||
};
|
||||
}
|
||||
|
||||
/// Unified socket mode replacing the old is_server bool + TLSMode pair.
|
||||
pub const SocketMode = enum {
|
||||
/// Default — TLS client or non-TLS socket
|
||||
client,
|
||||
/// Listener-owned server. TLS (if any) configured at the listener level.
|
||||
server,
|
||||
/// Duplex upgraded to TLS server role. Not listener-owned —
|
||||
/// markInactive uses client lifecycle path.
|
||||
duplex_server,
|
||||
|
||||
/// Returns true for any mode that acts as a TLS server (ALPN, handshake direction).
|
||||
/// Both .server and .duplex_server present as server to peers.
|
||||
pub fn isServer(this: SocketMode) bool {
|
||||
return this == .server or this == .duplex_server;
|
||||
}
|
||||
};
|
||||
|
||||
pub const DuplexUpgradeContext = struct {
|
||||
upgrade: uws.UpgradedDuplex,
|
||||
// We only us a tls and not a raw socket when upgrading a Duplex, Duplex dont support socketpairs
|
||||
@@ -1785,7 +1764,6 @@ pub const DuplexUpgradeContext = struct {
|
||||
task_event: EventState = .StartTLS,
|
||||
ssl_config: ?jsc.API.ServerConfig.SSLConfig,
|
||||
is_open: bool = false,
|
||||
#mode: SocketMode = .client,
|
||||
|
||||
pub const EventState = enum(u8) {
|
||||
StartTLS,
|
||||
@@ -1868,8 +1846,7 @@ pub const DuplexUpgradeContext = struct {
|
||||
switch (this.task_event) {
|
||||
.StartTLS => {
|
||||
if (this.ssl_config) |config| {
|
||||
log("DuplexUpgradeContext.startTLS mode={s}", .{@tagName(this.#mode)});
|
||||
this.upgrade.startTLS(config, this.#mode == .client) catch |err| {
|
||||
this.upgrade.startTLS(config, true) catch |err| {
|
||||
switch (err) {
|
||||
error.OutOfMemory => {
|
||||
bun.outOfMemory();
|
||||
@@ -1937,15 +1914,8 @@ pub fn jsUpgradeDuplexToTLS(globalObject: *jsc.JSGlobalObject, callframe: *jsc.C
|
||||
return globalObject.throw("Expected \"socket\" option", .{});
|
||||
};
|
||||
|
||||
var is_server = false;
|
||||
if (try opts.getTruthy(globalObject, "isServer")) |is_server_val| {
|
||||
is_server = is_server_val.toBoolean();
|
||||
}
|
||||
// Note: Handlers.fromJS is_server=false because these handlers are standalone
|
||||
// allocations (not embedded in a Listener). The mode field on Handlers
|
||||
// controls lifecycle (markInactive expects a Listener parent when .server).
|
||||
// The TLS direction (client vs server) is controlled by DuplexUpgradeContext.mode.
|
||||
const handlers = try Handlers.fromJS(globalObject, socket_obj, false);
|
||||
const is_server = false; // A duplex socket is always handled as a client
|
||||
const handlers = try Handlers.fromJS(globalObject, socket_obj, is_server);
|
||||
|
||||
var ssl_opts: ?jsc.API.ServerConfig.SSLConfig = null;
|
||||
if (try opts.getTruthy(globalObject, "tls")) |tls| {
|
||||
@@ -1967,9 +1937,6 @@ pub fn jsUpgradeDuplexToTLS(globalObject: *jsc.JSGlobalObject, callframe: *jsc.C
|
||||
|
||||
const handlers_ptr = bun.handleOom(handlers.vm.allocator.create(Handlers));
|
||||
handlers_ptr.* = handlers;
|
||||
// Set mode to duplex_server so TLSSocket.isServer() returns true for ALPN server mode
|
||||
// without affecting markInactive lifecycle (which requires a Listener parent).
|
||||
handlers_ptr.mode = if (is_server) .duplex_server else .client;
|
||||
var tls = bun.new(TLSSocket, .{
|
||||
.ref_count = .init(),
|
||||
.handlers = handlers_ptr,
|
||||
@@ -1996,7 +1963,6 @@ pub fn jsUpgradeDuplexToTLS(globalObject: *jsc.JSGlobalObject, callframe: *jsc.C
|
||||
.vm = globalObject.bunVM(),
|
||||
.task = undefined,
|
||||
.ssl_config = socket_config.*,
|
||||
.#mode = if (is_server) .duplex_server else .client,
|
||||
});
|
||||
tls.ref();
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ binary_type: BinaryType = .Buffer,
|
||||
vm: *jsc.VirtualMachine,
|
||||
globalObject: *jsc.JSGlobalObject,
|
||||
active_connections: u32 = 0,
|
||||
mode: SocketMode = .client,
|
||||
is_server: bool,
|
||||
promise: jsc.Strong.Optional = .empty,
|
||||
|
||||
protection_count: if (Environment.ci_assert) u32 else void = if (Environment.ci_assert) 0,
|
||||
@@ -81,7 +81,7 @@ pub fn markInactive(this: *Handlers) void {
|
||||
Listener.log("markInactive", .{});
|
||||
this.active_connections -= 1;
|
||||
if (this.active_connections == 0) {
|
||||
if (this.mode == .server) {
|
||||
if (this.is_server) {
|
||||
const listen_socket: *Listener = @fieldParentPtr("handlers", this);
|
||||
// allow it to be GC'd once the last connection is closed and it's not listening anymore
|
||||
if (listen_socket.listener == .none) {
|
||||
@@ -133,7 +133,7 @@ pub fn fromGenerated(
|
||||
var result: Handlers = .{
|
||||
.vm = globalObject.bunVM(),
|
||||
.globalObject = globalObject,
|
||||
.mode = if (is_server) .server else .client,
|
||||
.is_server = is_server,
|
||||
.binary_type = switch (generated.binary_type) {
|
||||
.arraybuffer => .ArrayBuffer,
|
||||
.buffer => .Buffer,
|
||||
@@ -217,7 +217,7 @@ pub fn clone(this: *const Handlers) Handlers {
|
||||
.vm = this.vm,
|
||||
.globalObject = this.globalObject,
|
||||
.binary_type = this.binary_type,
|
||||
.mode = this.mode,
|
||||
.is_server = this.is_server,
|
||||
};
|
||||
inline for (callback_fields) |field| {
|
||||
@field(result, field) = @field(this, field);
|
||||
@@ -346,7 +346,6 @@ const strings = bun.strings;
|
||||
const uws = bun.uws;
|
||||
const Listener = bun.api.Listener;
|
||||
const SSLConfig = bun.api.ServerConfig.SSLConfig;
|
||||
const SocketMode = bun.api.socket.SocketMode;
|
||||
|
||||
const jsc = bun.jsc;
|
||||
const JSValue = jsc.JSValue;
|
||||
|
||||
@@ -91,7 +91,7 @@ pub fn reload(this: *Listener, globalObject: *jsc.JSGlobalObject, callframe: *js
|
||||
return globalObject.throw("Expected \"socket\" object", .{});
|
||||
};
|
||||
|
||||
const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.mode == .server);
|
||||
const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server);
|
||||
this.handlers.deinit();
|
||||
this.handlers = handlers;
|
||||
|
||||
@@ -773,7 +773,7 @@ pub fn connectInner(globalObject: *jsc.JSGlobalObject, prev_maybe_tcp: ?*TCPSock
|
||||
|
||||
const handlers_ptr = bun.handleOom(handlers.vm.allocator.create(Handlers));
|
||||
handlers_ptr.* = handlers.*;
|
||||
handlers_ptr.mode = .client;
|
||||
handlers_ptr.is_server = false;
|
||||
|
||||
var promise = jsc.JSPromise.create(globalObject);
|
||||
const promise_value = promise.toJS();
|
||||
|
||||
@@ -173,10 +173,8 @@ pub fn SSLWrapper(comptime T: type) type {
|
||||
|
||||
// flush buffered data and returns amount of pending data to write
|
||||
pub fn flush(this: *This) usize {
|
||||
// handleTraffic may trigger a close callback which frees ssl,
|
||||
// so we must not capture the ssl pointer before calling it.
|
||||
this.handleTraffic();
|
||||
const ssl = this.ssl orelse return 0;
|
||||
this.handleTraffic();
|
||||
const pending = BoringSSL.BIO_ctrl_pending(BoringSSL.SSL_get_wbio(ssl));
|
||||
if (pending > 0) return @intCast(pending);
|
||||
return 0;
|
||||
@@ -430,8 +428,6 @@ pub fn SSLWrapper(comptime T: type) type {
|
||||
if (read > 0) {
|
||||
log("triggering data callback (read {d})", .{read});
|
||||
this.triggerDataCallback(buffer[0..read]);
|
||||
// The data callback may have closed the connection
|
||||
if (this.ssl == null or this.flags.closed_notified) return false;
|
||||
}
|
||||
this.triggerCloseCallback();
|
||||
return false;
|
||||
|
||||
@@ -82,8 +82,6 @@ JSC_DECLARE_HOST_FUNCTION(jsFunctionBunStripANSI);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunWrapAnsi);
|
||||
}
|
||||
|
||||
#include "CaseChange.h"
|
||||
|
||||
using namespace JSC;
|
||||
using namespace WebCore;
|
||||
|
||||
@@ -934,18 +932,14 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
|
||||
allocUnsafe BunObject_callback_allocUnsafe DontDelete|Function 1
|
||||
argv BunObject_lazyPropCb_wrap_argv DontDelete|PropertyCallback
|
||||
build BunObject_callback_build DontDelete|Function 1
|
||||
camelCase jsFunctionBunCamelCase DontDelete|Function 1
|
||||
capitalCase jsFunctionBunCapitalCase DontDelete|Function 1
|
||||
concatArrayBuffers functionConcatTypedArrays DontDelete|Function 3
|
||||
connect BunObject_callback_connect DontDelete|Function 1
|
||||
constantCase jsFunctionBunConstantCase DontDelete|Function 1
|
||||
cwd BunObject_lazyPropCb_wrap_cwd DontEnum|DontDelete|PropertyCallback
|
||||
color BunObject_callback_color DontDelete|Function 2
|
||||
deepEquals functionBunDeepEquals DontDelete|Function 2
|
||||
deepMatch functionBunDeepMatch DontDelete|Function 2
|
||||
deflateSync BunObject_callback_deflateSync DontDelete|Function 1
|
||||
dns constructDNSObject ReadOnly|DontDelete|PropertyCallback
|
||||
dotCase jsFunctionBunDotCase DontDelete|Function 1
|
||||
enableANSIColors BunObject_lazyPropCb_wrap_enableANSIColors DontDelete|PropertyCallback
|
||||
env constructEnvObject ReadOnly|DontDelete|PropertyCallback
|
||||
escapeHTML functionBunEscapeHTML DontDelete|Function 2
|
||||
@@ -960,7 +954,6 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
|
||||
indexOfLine BunObject_callback_indexOfLine DontDelete|Function 1
|
||||
inflateSync BunObject_callback_inflateSync DontDelete|Function 1
|
||||
inspect BunObject_lazyPropCb_wrap_inspect DontDelete|PropertyCallback
|
||||
kebabCase jsFunctionBunKebabCase DontDelete|Function 1
|
||||
isMainThread constructIsMainThread ReadOnly|DontDelete|PropertyCallback
|
||||
jest BunObject_callback_jest DontEnum|DontDelete|Function 1
|
||||
listen BunObject_callback_listen DontDelete|Function 1
|
||||
@@ -968,10 +961,7 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
|
||||
main bunObjectMain DontDelete|CustomAccessor
|
||||
mmap BunObject_callback_mmap DontDelete|Function 1
|
||||
nanoseconds functionBunNanoseconds DontDelete|Function 0
|
||||
noCase jsFunctionBunNoCase DontDelete|Function 1
|
||||
openInEditor BunObject_callback_openInEditor DontDelete|Function 1
|
||||
pascalCase jsFunctionBunPascalCase DontDelete|Function 1
|
||||
pathCase jsFunctionBunPathCase DontDelete|Function 1
|
||||
origin BunObject_lazyPropCb_wrap_origin DontEnum|ReadOnly|DontDelete|PropertyCallback
|
||||
version_with_sha constructBunVersionWithSha DontEnum|ReadOnly|DontDelete|PropertyCallback
|
||||
password constructPasswordObject DontDelete|PropertyCallback
|
||||
@@ -992,8 +982,6 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
|
||||
resolveSync BunObject_callback_resolveSync DontDelete|Function 1
|
||||
revision constructBunRevision ReadOnly|DontDelete|PropertyCallback
|
||||
semver BunObject_lazyPropCb_wrap_semver ReadOnly|DontDelete|PropertyCallback
|
||||
sentenceCase jsFunctionBunSentenceCase DontDelete|Function 1
|
||||
snakeCase jsFunctionBunSnakeCase DontDelete|Function 1
|
||||
sql defaultBunSQLObject DontDelete|PropertyCallback
|
||||
postgres defaultBunSQLObject DontDelete|PropertyCallback
|
||||
SQL constructBunSQLObject DontDelete|PropertyCallback
|
||||
@@ -1009,7 +997,6 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
|
||||
stdout BunObject_lazyPropCb_wrap_stdout DontDelete|PropertyCallback
|
||||
stringWidth Generated::BunObject::jsStringWidth DontDelete|Function 2
|
||||
stripANSI jsFunctionBunStripANSI DontDelete|Function 1
|
||||
trainCase jsFunctionBunTrainCase DontDelete|Function 1
|
||||
wrapAnsi jsFunctionBunWrapAnsi DontDelete|Function 3
|
||||
Terminal BunObject_lazyPropCb_wrap_Terminal DontDelete|PropertyCallback
|
||||
unsafe BunObject_lazyPropCb_wrap_unsafe DontDelete|PropertyCallback
|
||||
|
||||
@@ -1,355 +0,0 @@
|
||||
#include "root.h"
|
||||
#include "CaseChange.h"
|
||||
|
||||
#include <unicode/uchar.h>
|
||||
#include <unicode/utf16.h>
|
||||
#include <wtf/text/StringBuilder.h>
|
||||
#include <wtf/text/WTFString.h>
|
||||
|
||||
namespace Bun {
|
||||
|
||||
using namespace JSC;
|
||||
using namespace WTF;
|
||||
|
||||
enum class CaseType {
|
||||
Camel,
|
||||
Pascal,
|
||||
Snake,
|
||||
Kebab,
|
||||
Constant,
|
||||
Dot,
|
||||
Capital,
|
||||
Train,
|
||||
Path,
|
||||
Sentence,
|
||||
No
|
||||
};
|
||||
|
||||
enum class CharClass {
|
||||
Lower,
|
||||
Upper,
|
||||
Digit,
|
||||
Other
|
||||
};
|
||||
|
||||
enum class WordTransform {
|
||||
Lower,
|
||||
Upper,
|
||||
Capitalize
|
||||
};
|
||||
|
||||
static inline CharClass classifyCp(char32_t c)
|
||||
{
|
||||
if (c < 0x80) {
|
||||
if (c >= 'a' && c <= 'z')
|
||||
return CharClass::Lower;
|
||||
if (c >= 'A' && c <= 'Z')
|
||||
return CharClass::Upper;
|
||||
if (c >= '0' && c <= '9')
|
||||
return CharClass::Digit;
|
||||
return CharClass::Other;
|
||||
}
|
||||
if (u_hasBinaryProperty(c, UCHAR_UPPERCASE))
|
||||
return CharClass::Upper;
|
||||
if (u_hasBinaryProperty(c, UCHAR_ALPHABETIC))
|
||||
return CharClass::Lower;
|
||||
return CharClass::Other;
|
||||
}
|
||||
|
||||
static inline char separator(CaseType type)
|
||||
{
|
||||
switch (type) {
|
||||
case CaseType::Camel:
|
||||
case CaseType::Pascal:
|
||||
return 0;
|
||||
case CaseType::Snake:
|
||||
case CaseType::Constant:
|
||||
return '_';
|
||||
case CaseType::Kebab:
|
||||
case CaseType::Train:
|
||||
return '-';
|
||||
case CaseType::Dot:
|
||||
return '.';
|
||||
case CaseType::Capital:
|
||||
case CaseType::Sentence:
|
||||
case CaseType::No:
|
||||
return ' ';
|
||||
case CaseType::Path:
|
||||
return '/';
|
||||
}
|
||||
RELEASE_ASSERT_NOT_REACHED();
|
||||
}
|
||||
|
||||
static inline bool hasDigitPrefixUnderscore(CaseType type)
|
||||
{
|
||||
return type == CaseType::Camel || type == CaseType::Pascal;
|
||||
}
|
||||
|
||||
static inline WordTransform getTransform(CaseType type, size_t wordIndex)
|
||||
{
|
||||
switch (type) {
|
||||
case CaseType::Camel:
|
||||
return wordIndex == 0 ? WordTransform::Lower : WordTransform::Capitalize;
|
||||
case CaseType::Pascal:
|
||||
return WordTransform::Capitalize;
|
||||
case CaseType::Snake:
|
||||
case CaseType::Kebab:
|
||||
case CaseType::Dot:
|
||||
case CaseType::Path:
|
||||
case CaseType::No:
|
||||
return WordTransform::Lower;
|
||||
case CaseType::Constant:
|
||||
return WordTransform::Upper;
|
||||
case CaseType::Capital:
|
||||
case CaseType::Train:
|
||||
return WordTransform::Capitalize;
|
||||
case CaseType::Sentence:
|
||||
return wordIndex == 0 ? WordTransform::Capitalize : WordTransform::Lower;
|
||||
}
|
||||
RELEASE_ASSERT_NOT_REACHED();
|
||||
}
|
||||
|
||||
// Word boundary detection and case conversion, templated on character type.
|
||||
// For Latin1Character, each element is a codepoint.
|
||||
// For UChar, we use U16_NEXT to handle surrogate pairs.
|
||||
template<typename CharType>
|
||||
static WTF::String convertCase(CaseType type, std::span<const CharType> input)
|
||||
{
|
||||
// First pass: collect word boundaries (start/end byte offsets)
|
||||
struct WordRange {
|
||||
uint32_t start;
|
||||
uint32_t end;
|
||||
};
|
||||
|
||||
Vector<WordRange, 16> words;
|
||||
{
|
||||
bool inWord = false;
|
||||
uint32_t wordStart = 0;
|
||||
uint32_t wordEnd = 0;
|
||||
CharClass prevClass = CharClass::Other;
|
||||
CharClass prevPrevClass = CharClass::Other;
|
||||
uint32_t prevPos = 0;
|
||||
|
||||
int32_t i = 0;
|
||||
int32_t length = static_cast<int32_t>(input.size());
|
||||
|
||||
while (i < length) {
|
||||
uint32_t curPos = static_cast<uint32_t>(i);
|
||||
char32_t cp;
|
||||
|
||||
if constexpr (std::is_same_v<CharType, Latin1Character>) {
|
||||
cp = input[i];
|
||||
i++;
|
||||
} else {
|
||||
U16_NEXT(input.data(), i, length, cp);
|
||||
}
|
||||
|
||||
uint32_t curEnd = static_cast<uint32_t>(i);
|
||||
CharClass curClass = classifyCp(cp);
|
||||
|
||||
if (curClass == CharClass::Other) {
|
||||
if (inWord) {
|
||||
inWord = false;
|
||||
words.append({ wordStart, wordEnd });
|
||||
prevClass = CharClass::Other;
|
||||
prevPrevClass = CharClass::Other;
|
||||
} else {
|
||||
prevClass = CharClass::Other;
|
||||
prevPrevClass = CharClass::Other;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!inWord) {
|
||||
inWord = true;
|
||||
wordStart = curPos;
|
||||
wordEnd = curEnd;
|
||||
prevPrevClass = CharClass::Other;
|
||||
prevClass = curClass;
|
||||
prevPos = curPos;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Rule 2: upper+upper+lower → boundary before the last upper
|
||||
if (prevPrevClass == CharClass::Upper && prevClass == CharClass::Upper && curClass == CharClass::Lower) {
|
||||
words.append({ wordStart, prevPos });
|
||||
wordStart = prevPos;
|
||||
wordEnd = curEnd;
|
||||
prevPrevClass = prevClass;
|
||||
prevClass = curClass;
|
||||
prevPos = curPos;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Rule 1: (lower | digit) → upper boundary
|
||||
if ((prevClass == CharClass::Lower || prevClass == CharClass::Digit) && curClass == CharClass::Upper) {
|
||||
words.append({ wordStart, wordEnd });
|
||||
wordStart = curPos;
|
||||
wordEnd = curEnd;
|
||||
prevPrevClass = CharClass::Other;
|
||||
prevClass = curClass;
|
||||
prevPos = curPos;
|
||||
continue;
|
||||
}
|
||||
|
||||
// No boundary, extend current word
|
||||
wordEnd = curEnd;
|
||||
prevPrevClass = prevClass;
|
||||
prevClass = curClass;
|
||||
prevPos = curPos;
|
||||
}
|
||||
|
||||
// Flush last word
|
||||
if (inWord)
|
||||
words.append({ wordStart, wordEnd });
|
||||
}
|
||||
|
||||
if (words.isEmpty())
|
||||
return emptyString();
|
||||
|
||||
// Second pass: build the output string
|
||||
StringBuilder builder;
|
||||
builder.reserveCapacity(input.size() + input.size() / 4);
|
||||
|
||||
char sep = separator(type);
|
||||
|
||||
for (size_t wordIndex = 0; wordIndex < words.size(); wordIndex++) {
|
||||
auto& word = words[wordIndex];
|
||||
|
||||
// Separator between words
|
||||
if (wordIndex > 0 && sep)
|
||||
builder.append(sep);
|
||||
|
||||
// Digit-prefix underscore for camelCase/pascalCase
|
||||
if (wordIndex > 0 && hasDigitPrefixUnderscore(type)) {
|
||||
char32_t firstCp;
|
||||
if constexpr (std::is_same_v<CharType, Latin1Character>) {
|
||||
firstCp = input[word.start];
|
||||
} else {
|
||||
int32_t tmpI = word.start;
|
||||
U16_NEXT(input.data(), tmpI, static_cast<int32_t>(input.size()), firstCp);
|
||||
}
|
||||
if (firstCp >= '0' && firstCp <= '9')
|
||||
builder.append('_');
|
||||
}
|
||||
|
||||
WordTransform transform = getTransform(type, wordIndex);
|
||||
|
||||
// Iterate codepoints within the word and apply transform
|
||||
int32_t pos = word.start;
|
||||
int32_t end = word.end;
|
||||
bool isFirst = true;
|
||||
|
||||
while (pos < end) {
|
||||
char32_t cp;
|
||||
if constexpr (std::is_same_v<CharType, Latin1Character>) {
|
||||
cp = input[pos];
|
||||
pos++;
|
||||
} else {
|
||||
U16_NEXT(input.data(), pos, end, cp);
|
||||
}
|
||||
|
||||
char32_t transformed;
|
||||
switch (transform) {
|
||||
case WordTransform::Lower:
|
||||
transformed = u_tolower(cp);
|
||||
break;
|
||||
case WordTransform::Upper:
|
||||
transformed = u_toupper(cp);
|
||||
break;
|
||||
case WordTransform::Capitalize:
|
||||
transformed = isFirst ? u_toupper(cp) : u_tolower(cp);
|
||||
break;
|
||||
}
|
||||
isFirst = false;
|
||||
|
||||
builder.append(static_cast<char32_t>(transformed));
|
||||
}
|
||||
}
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
static EncodedJSValue caseChangeImpl(CaseType type, JSGlobalObject* globalObject, CallFrame* callFrame)
|
||||
{
|
||||
auto& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
JSValue input = callFrame->argument(0);
|
||||
if (!input.isString()) {
|
||||
throwTypeError(globalObject, scope, "Expected a string argument"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
JSString* jsStr = input.toString(globalObject);
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
|
||||
auto view = jsStr->view(globalObject);
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
|
||||
if (view->isEmpty())
|
||||
return JSValue::encode(jsEmptyString(vm));
|
||||
|
||||
WTF::String result = view->is8Bit()
|
||||
? convertCase<Latin1Character>(type, view->span8())
|
||||
: convertCase<UChar>(type, view->span16());
|
||||
|
||||
return JSValue::encode(jsString(vm, WTF::move(result)));
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunCamelCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Camel, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunPascalCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Pascal, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunSnakeCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Snake, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunKebabCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Kebab, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunConstantCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Constant, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunDotCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Dot, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunCapitalCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Capital, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunTrainCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Train, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunPathCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Path, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunSentenceCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::Sentence, globalObject, callFrame);
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionBunNoCase, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
return caseChangeImpl(CaseType::No, globalObject, callFrame);
|
||||
}
|
||||
|
||||
} // namespace Bun
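To make the two boundary rules above concrete, here is a minimal TypeScript sketch. It is an illustration only, not the shipped code: it is ASCII-only, while the implementation above classifies full Unicode code points through ICU.

// Sketch only: an ASCII re-statement of the two word-boundary rules above.
function splitWords(s: string): string[] {
  const words: string[] = [];
  let cur = "";
  let prev = "";
  let prevPrev = "";
  for (const ch of s) {
    if (!/[A-Za-z0-9]/.test(ch)) { // any other character ends the current word
      if (cur) words.push(cur);
      cur = prev = prevPrev = "";
      continue;
    }
    if (/[A-Z]/.test(prevPrev) && /[A-Z]/.test(prev) && /[a-z]/.test(ch)) {
      // Rule 2: upper+upper+lower => boundary before the last upper
      words.push(cur.slice(0, -1));
      cur = cur.slice(-1);
    } else if (/[a-z0-9]/.test(prev) && /[A-Z]/.test(ch)) {
      // Rule 1: lower-or-digit followed by upper => boundary
      words.push(cur);
      cur = "";
    }
    cur += ch;
    prevPrev = prev;
    prev = ch;
  }
  if (cur) words.push(cur);
  return words;
}
// splitWords("XMLParser")   => ["XML", "Parser"]
// splitWords("getHTTPSURL") => ["get", "HTTPSURL"]  (no trailing lower, so Rule 2 never fires)
// splitWords("TestV2")      => ["Test", "V2"]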
|
||||
@@ -1,19 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include "root.h"
|
||||
|
||||
namespace Bun {
|
||||
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunCamelCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunPascalCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunSnakeCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunKebabCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunConstantCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunDotCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunCapitalCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunTrainCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunPathCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunSentenceCase);
|
||||
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunNoCase);
|
||||
|
||||
}
|
||||
@@ -125,6 +125,7 @@
|
||||
#include "JSSocketAddressDTO.h"
|
||||
#include "JSReactElement.h"
|
||||
#include "JSSQLStatement.h"
|
||||
#include "git/JSGit.h"
|
||||
#include "JSStringDecoder.h"
|
||||
#include "JSTextEncoder.h"
|
||||
#include "JSTextEncoderStream.h"
|
||||
@@ -1868,6 +1869,16 @@ void GlobalObject::finishCreation(VM& vm)
|
||||
init.set(WebCore::createJSSQLStatementStructure(init.owner));
|
||||
});
|
||||
|
||||
m_JSGitRepositoryStructure.initLater(
|
||||
[](const Initializer<Structure>& init) {
|
||||
init.set(WebCore::createJSGitRepositoryStructure(init.owner));
|
||||
});
|
||||
|
||||
m_JSGitCommitStructure.initLater(
|
||||
[](const Initializer<Structure>& init) {
|
||||
init.set(WebCore::createJSGitCommitStructure(init.owner));
|
||||
});
|
||||
|
||||
m_V8GlobalInternals.initLater(
|
||||
[](const JSC::LazyProperty<JSC::JSGlobalObject, v8::shim::GlobalInternals>::Initializer& init) {
|
||||
init.set(
|
||||
|
||||
@@ -316,6 +316,9 @@ public:
|
||||
|
||||
Structure* JSSQLStatementStructure() const { return m_JSSQLStatementStructure.getInitializedOnMainThread(this); }
|
||||
|
||||
Structure* JSGitRepositoryStructure() const { return m_JSGitRepositoryStructure.getInitializedOnMainThread(this); }
|
||||
Structure* JSGitCommitStructure() const { return m_JSGitCommitStructure.getInitializedOnMainThread(this); }
|
||||
|
||||
v8::shim::GlobalInternals* V8GlobalInternals() const { return m_V8GlobalInternals.getInitializedOnMainThread(this); }
|
||||
|
||||
Bun::BakeAdditionsToGlobalObject& bakeAdditions() { return m_bakeAdditions; }
|
||||
@@ -620,6 +623,8 @@ public:
|
||||
V(private, LazyPropertyOfGlobalObject<Structure>, m_NapiTypeTagStructure) \
|
||||
\
|
||||
V(private, LazyPropertyOfGlobalObject<Structure>, m_JSSQLStatementStructure) \
|
||||
V(private, LazyPropertyOfGlobalObject<Structure>, m_JSGitRepositoryStructure) \
|
||||
V(private, LazyPropertyOfGlobalObject<Structure>, m_JSGitCommitStructure) \
|
||||
V(private, LazyPropertyOfGlobalObject<v8::shim::GlobalInternals>, m_V8GlobalInternals) \
|
||||
\
|
||||
V(public, LazyPropertyOfGlobalObject<JSObject>, m_bunObject) \
|
||||
|
||||
src/bun.js/bindings/git/JSGit.cpp (new file, 1305 lines; diff not shown because it is too large)
src/bun.js/bindings/git/JSGit.h (new file, 140 lines)
@@ -0,0 +1,140 @@
|
||||
/*
|
||||
* Copyright (C) 2024 Oven-sh
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
|
||||
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "root.h"
|
||||
#include "ZigGlobalObject.h"
|
||||
|
||||
#include <JavaScriptCore/JSFunction.h>
|
||||
#include <JavaScriptCore/JSDestructibleObject.h>
|
||||
#include <JavaScriptCore/VM.h>
|
||||
|
||||
#include "headers-handwritten.h"
|
||||
#include "BunClientData.h"
|
||||
#include <JavaScriptCore/CallFrame.h>
|
||||
|
||||
// Forward declarations for libgit2 types
|
||||
typedef struct git_repository git_repository;
|
||||
typedef struct git_commit git_commit;
|
||||
typedef struct git_oid git_oid;
|
||||
|
||||
namespace WebCore {
|
||||
|
||||
// Forward declarations
|
||||
class JSGitRepository;
|
||||
class JSGitCommit;
|
||||
class JSGitOid;
|
||||
|
||||
// JSGitRepository - Wraps git_repository*
|
||||
class JSGitRepository final : public JSC::JSDestructibleObject {
|
||||
public:
|
||||
using Base = JSC::JSDestructibleObject;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSGitRepository* create(JSC::VM& vm, JSC::Structure* structure, git_repository* repo);
|
||||
static void destroy(JSC::JSCell* cell);
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
template<typename CellType, JSC::SubspaceAccess mode>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
return WebCore::subspaceForImpl<JSGitRepository, WebCore::UseCustomHeapCellType::No>(
|
||||
vm,
|
||||
[](auto& spaces) { return spaces.m_clientSubspaceForJSGitRepository.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForJSGitRepository = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceForJSGitRepository.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceForJSGitRepository = std::forward<decltype(space)>(space); });
|
||||
}
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
|
||||
{
|
||||
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
|
||||
}
|
||||
|
||||
git_repository* repository() const { return m_repo; }
|
||||
|
||||
private:
|
||||
JSGitRepository(JSC::VM& vm, JSC::Structure* structure, git_repository* repo)
|
||||
: Base(vm, structure)
|
||||
, m_repo(repo)
|
||||
{
|
||||
}
|
||||
|
||||
void finishCreation(JSC::VM& vm);
|
||||
|
||||
git_repository* m_repo { nullptr };
|
||||
};
|
||||
|
||||
// JSGitCommit - Wraps git_commit*
|
||||
class JSGitCommit final : public JSC::JSDestructibleObject {
|
||||
public:
|
||||
using Base = JSC::JSDestructibleObject;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSGitCommit* create(JSC::VM& vm, JSC::Structure* structure, git_commit* commit);
|
||||
static void destroy(JSC::JSCell* cell);
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
template<typename CellType, JSC::SubspaceAccess mode>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
return WebCore::subspaceForImpl<JSGitCommit, WebCore::UseCustomHeapCellType::No>(
|
||||
vm,
|
||||
[](auto& spaces) { return spaces.m_clientSubspaceForJSGitCommit.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForJSGitCommit = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceForJSGitCommit.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceForJSGitCommit = std::forward<decltype(space)>(space); });
|
||||
}
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
|
||||
{
|
||||
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
|
||||
}
|
||||
|
||||
git_commit* commit() const { return m_commit; }
|
||||
|
||||
private:
|
||||
JSGitCommit(JSC::VM& vm, JSC::Structure* structure, git_commit* commit)
|
||||
: Base(vm, structure)
|
||||
, m_commit(commit)
|
||||
{
|
||||
}
|
||||
|
||||
void finishCreation(JSC::VM& vm);
|
||||
|
||||
git_commit* m_commit { nullptr };
|
||||
};
|
||||
|
||||
// Structure creation functions
|
||||
JSC::Structure* createJSGitRepositoryStructure(JSC::JSGlobalObject* globalObject);
|
||||
JSC::Structure* createJSGitCommitStructure(JSC::JSGlobalObject* globalObject);
|
||||
|
||||
// Module creation function (called from $cpp)
|
||||
JSC::JSValue createJSGitModule(Zig::GlobalObject* globalObject);
|
||||
|
||||
} // namespace WebCore
|
||||
@@ -81,38 +81,29 @@ size_t IndexOfAnyCharImpl(const uint8_t* HWY_RESTRICT text, size_t text_len, con
|
||||
} else {
|
||||
ASSERT(chars_len <= 16);
|
||||
|
||||
const size_t simd_text_len = text_len - (text_len % N);
|
||||
size_t i = 0;
|
||||
|
||||
#if !HWY_HAVE_SCALABLE && !HWY_TARGET_IS_SVE
|
||||
// Preload search characters into native-width vectors.
|
||||
// On non-SVE targets, Vec has a known size and can be stored in arrays.
|
||||
// Use FixedTag to preload search characters into fixed-size vectors.
|
||||
// ScalableTag vectors (SVE) are sizeless and cannot be stored in arrays.
|
||||
// FixedTag gives us a known compile-time size that can be stored in arrays,
|
||||
// then ResizeBitCast converts back to scalable vectors in the inner loop.
|
||||
static constexpr size_t kMaxPreloadedChars = 16;
|
||||
hn::Vec<D8> char_vecs[kMaxPreloadedChars];
|
||||
const hn::FixedTag<uint8_t, 16> d_fixed;
|
||||
using VecFixed = hn::Vec<decltype(d_fixed)>;
|
||||
VecFixed char_vecs[kMaxPreloadedChars];
|
||||
const size_t num_chars_to_preload = std::min(chars_len, kMaxPreloadedChars);
|
||||
for (size_t c = 0; c < num_chars_to_preload; ++c) {
|
||||
char_vecs[c] = hn::Set(d, chars[c]);
|
||||
char_vecs[c] = hn::Set(d_fixed, chars[c]);
|
||||
}
|
||||
|
||||
const size_t simd_text_len = text_len - (text_len % N);
|
||||
size_t i = 0;
|
||||
|
||||
for (; i < simd_text_len; i += N) {
|
||||
const auto text_vec = hn::LoadN(d, text + i, N);
|
||||
auto found_mask = hn::MaskFalse(d);
|
||||
|
||||
for (size_t c = 0; c < num_chars_to_preload; ++c) {
|
||||
found_mask = hn::Or(found_mask, hn::Eq(text_vec, char_vecs[c]));
|
||||
found_mask = hn::Or(found_mask, hn::Eq(text_vec, hn::ResizeBitCast(d, char_vecs[c])));
|
||||
}
|
||||
#else
|
||||
// SVE types are sizeless and cannot be stored in arrays.
|
||||
// hn::Set is a single broadcast instruction; the compiler will
|
||||
// hoist these loop-invariant broadcasts out of the outer loop.
|
||||
for (; i < simd_text_len; i += N) {
|
||||
const auto text_vec = hn::LoadN(d, text + i, N);
|
||||
auto found_mask = hn::MaskFalse(d);
|
||||
|
||||
for (size_t c = 0; c < chars_len; ++c) {
|
||||
found_mask = hn::Or(found_mask, hn::Eq(text_vec, hn::Set(d, chars[c])));
|
||||
}
|
||||
#endif
|
||||
|
||||
const intptr_t pos = hn::FindFirstTrue(d, found_mask);
|
||||
if (pos >= 0) {
|
||||
|
||||
@@ -24,6 +24,8 @@ public:
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForNapiPrototype;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSSQLStatement;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSSQLStatementConstructor;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSGitRepository;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSGitCommit;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSSinkConstructor;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSSinkController;
|
||||
std::unique_ptr<GCClient::IsoSubspace> m_clientSubspaceForJSSink;
|
||||
|
||||
@@ -24,6 +24,8 @@ public:
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForNapiPrototype;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSSQLStatement;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSSQLStatementConstructor;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSGitRepository;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSGitCommit;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSSinkConstructor;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSSinkController;
|
||||
std::unique_ptr<IsoSubspace> m_subspaceForJSSink;
|
||||
|
||||
@@ -14,10 +14,15 @@ param(
|
||||
[Switch]$DownloadWithoutCurl = $false
|
||||
);
|
||||
|
||||
# filter out 32 bit + ARM
|
||||
if (-not ((Get-CimInstance Win32_ComputerSystem)).SystemType -match "x64-based") {
|
||||
# Detect system architecture
|
||||
$SystemType = (Get-CimInstance Win32_ComputerSystem).SystemType
|
||||
if ($SystemType -match "ARM64-based") {
|
||||
$IsArm64 = $true
|
||||
} elseif ($SystemType -match "x64-based") {
|
||||
$IsArm64 = $false
|
||||
} else {
|
||||
Write-Output "Install Failed:"
|
||||
Write-Output "Bun for Windows is currently only available for x86 64-bit Windows.`n"
|
||||
Write-Output "Bun for Windows is currently only available for x86 64-bit and ARM64 Windows.`n"
|
||||
return 1
|
||||
}
|
||||
|
||||
@@ -103,13 +108,18 @@ function Install-Bun {
|
||||
$Version = "bun-$Version"
|
||||
}
|
||||
|
||||
$Arch = "x64"
|
||||
$IsBaseline = $ForceBaseline
|
||||
if (!$IsBaseline) {
|
||||
$IsBaseline = !( `
|
||||
Add-Type -MemberDefinition '[DllImport("kernel32.dll")] public static extern bool IsProcessorFeaturePresent(int ProcessorFeature);' `
|
||||
-Name 'Kernel32' -Namespace 'Win32' -PassThru `
|
||||
)::IsProcessorFeaturePresent(40);
|
||||
if ($IsArm64) {
|
||||
$Arch = "aarch64"
|
||||
$IsBaseline = $false
|
||||
} else {
|
||||
$Arch = "x64"
|
||||
$IsBaseline = $ForceBaseline
|
||||
if (!$IsBaseline) {
|
||||
$IsBaseline = !( `
|
||||
Add-Type -MemberDefinition '[DllImport("kernel32.dll")] public static extern bool IsProcessorFeaturePresent(int ProcessorFeature);' `
|
||||
-Name 'Kernel32' -Namespace 'Win32' -PassThru `
|
||||
)::IsProcessorFeaturePresent(40);
|
||||
}
|
||||
}
|
||||
|
||||
$BunRoot = if ($env:BUN_INSTALL) { $env:BUN_INSTALL } else { "${Home}\.bun" }
|
||||
@@ -219,7 +229,8 @@ function Install-Bun {
|
||||
# I want to keep this error message in for a few months to ensure that
|
||||
# if someone somehow runs into this, it can be reported.
|
||||
Write-Output "Install Failed - You are missing a DLL required to run bun.exe"
|
||||
Write-Output "This can be solved by installing the Visual C++ Redistributable from Microsoft:`nSee https://learn.microsoft.com/cpp/windows/latest-supported-vc-redist`nDirect Download -> https://aka.ms/vs/17/release/vc_redist.x64.exe`n`n"
|
||||
$VCRedistArch = if ($Arch -eq "aarch64") { "arm64" } else { "x64" }
|
||||
Write-Output "This can be solved by installing the Visual C++ Redistributable from Microsoft:`nSee https://learn.microsoft.com/cpp/windows/latest-supported-vc-redist`nDirect Download -> https://aka.ms/vs/17/release/vc_redist.${VCRedistArch}.exe`n`n"
|
||||
Write-Output "The error above should be unreachable as Bun does not depend on this library. Please comment in https://github.com/oven-sh/bun/issues/8598 or open a new issue.`n`n"
|
||||
Write-Output "The command '${BunBin}\bun.exe --revision' exited with code ${LASTEXITCODE}`n"
|
||||
return 1
|
||||
|
||||
src/js/bun/git.ts (new file, 306 lines)
@@ -0,0 +1,306 @@
|
||||
// Hardcoded module "bun:git"
|
||||
|
||||
let Git: any;
|
||||
|
||||
function initializeGit() {
|
||||
Git = $cpp("git/JSGit.cpp", "createJSGitModule");
|
||||
}
|
||||
|
||||
interface Signature {
|
||||
name: string;
|
||||
email: string;
|
||||
time: number; // Unix timestamp in milliseconds
|
||||
}
|
||||
|
||||
interface StatusOptions {
|
||||
includeUntracked?: boolean;
|
||||
includeIgnored?: boolean;
|
||||
recurseUntrackedDirs?: boolean;
|
||||
detectRenames?: boolean;
|
||||
}
|
||||
|
||||
interface InternalStatusEntry {
|
||||
path: string;
|
||||
status: number;
|
||||
}
|
||||
|
||||
interface IndexEntry {
|
||||
path: string;
|
||||
mode: number;
|
||||
oid: string;
|
||||
stage: number;
|
||||
size: number;
|
||||
}
|
||||
|
||||
interface DiffOptions {
|
||||
cached?: boolean;
|
||||
}
|
||||
|
||||
interface DiffFile {
|
||||
status: number;
|
||||
oldPath: string | null;
|
||||
newPath: string;
|
||||
similarity?: number;
|
||||
}
|
||||
|
||||
interface DiffResult {
|
||||
files: DiffFile[];
|
||||
stats: {
|
||||
filesChanged: number;
|
||||
insertions: number;
|
||||
deletions: number;
|
||||
};
|
||||
}
|
||||
|
||||
interface LogOptions {
|
||||
from?: string;
|
||||
range?: string;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
// Status constants (nodegit compatible)
|
||||
const Status = {
|
||||
CURRENT: 0,
|
||||
INDEX_NEW: 1,
|
||||
INDEX_MODIFIED: 2,
|
||||
INDEX_DELETED: 4,
|
||||
INDEX_RENAMED: 8,
|
||||
INDEX_TYPECHANGE: 16,
|
||||
WT_NEW: 128,
|
||||
WT_MODIFIED: 256,
|
||||
WT_DELETED: 512,
|
||||
WT_TYPECHANGE: 1024,
|
||||
WT_RENAMED: 2048,
|
||||
IGNORED: 16384,
|
||||
CONFLICTED: 32768,
|
||||
};
|
||||
|
||||
// DeltaType constants (nodegit compatible)
|
||||
const DeltaType = {
|
||||
UNMODIFIED: 0,
|
||||
ADDED: 1,
|
||||
DELETED: 2,
|
||||
MODIFIED: 3,
|
||||
RENAMED: 4,
|
||||
COPIED: 5,
|
||||
IGNORED: 6,
|
||||
UNTRACKED: 7,
|
||||
TYPECHANGE: 8,
|
||||
CONFLICTED: 10,
|
||||
};
|
||||
|
||||
class StatusEntry {
|
||||
path: string;
|
||||
status: number;
|
||||
|
||||
constructor(entry: InternalStatusEntry) {
|
||||
this.path = entry.path;
|
||||
this.status = entry.status;
|
||||
}
|
||||
|
||||
isNew(): boolean {
|
||||
return (this.status & (Status.INDEX_NEW | Status.WT_NEW)) !== 0;
|
||||
}
|
||||
|
||||
isModified(): boolean {
|
||||
return (this.status & (Status.INDEX_MODIFIED | Status.WT_MODIFIED)) !== 0;
|
||||
}
|
||||
|
||||
isDeleted(): boolean {
|
||||
return (this.status & (Status.INDEX_DELETED | Status.WT_DELETED)) !== 0;
|
||||
}
|
||||
|
||||
isRenamed(): boolean {
|
||||
return (this.status & (Status.INDEX_RENAMED | Status.WT_RENAMED)) !== 0;
|
||||
}
|
||||
|
||||
isIgnored(): boolean {
|
||||
return (this.status & Status.IGNORED) !== 0;
|
||||
}
|
||||
|
||||
inIndex(): boolean {
|
||||
return (
|
||||
(this.status &
|
||||
(Status.INDEX_NEW |
|
||||
Status.INDEX_MODIFIED |
|
||||
Status.INDEX_DELETED |
|
||||
Status.INDEX_RENAMED |
|
||||
Status.INDEX_TYPECHANGE)) !==
|
||||
0
|
||||
);
|
||||
}
|
||||
|
||||
inWorkingTree(): boolean {
|
||||
return (
|
||||
(this.status &
|
||||
(Status.WT_NEW | Status.WT_MODIFIED | Status.WT_DELETED | Status.WT_TYPECHANGE | Status.WT_RENAMED)) !==
|
||||
0
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class Repository {
|
||||
#repo: any;
|
||||
|
||||
constructor(repo: any) {
|
||||
this.#repo = repo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open an existing Git repository
|
||||
*/
|
||||
static open(path: string): Repository {
|
||||
if (!Git) {
|
||||
initializeGit();
|
||||
}
|
||||
const repo = Git.Repository.open(path);
|
||||
return new Repository(repo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HEAD commit
|
||||
*/
|
||||
head(): Commit {
|
||||
const commit = this.#repo.head();
|
||||
return new Commit(commit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the .git directory path
|
||||
*/
|
||||
get path(): string {
|
||||
return this.#repo.path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the working directory path (null for bare repositories)
|
||||
*/
|
||||
get workdir(): string | null {
|
||||
return this.#repo.workdir;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a bare repository
|
||||
*/
|
||||
get isBare(): boolean {
|
||||
return this.#repo.isBare;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the working directory status (nodegit compatible)
|
||||
*/
|
||||
getStatus(options?: StatusOptions): StatusEntry[] {
|
||||
const entries = this.#repo.getStatus(options);
|
||||
return entries.map((e: InternalStatusEntry) => new StatusEntry(e));
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a revision spec to an OID
|
||||
*/
|
||||
revParse(spec: string): string {
|
||||
return this.#repo.revParse(spec);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the name of the current branch (null if detached HEAD or no commits)
|
||||
*/
|
||||
getCurrentBranch(): string | null {
|
||||
return this.#repo.getCurrentBranch();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get ahead/behind counts between two commits
|
||||
*/
|
||||
aheadBehind(local?: string, upstream?: string): { ahead: number; behind: number } {
|
||||
return this.#repo.aheadBehind(local, upstream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of files in the index
|
||||
*/
|
||||
listFiles(): IndexEntry[] {
|
||||
return this.#repo.listFiles();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get diff information
|
||||
*/
|
||||
diff(options?: DiffOptions): DiffResult {
|
||||
return this.#repo.diff(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Count commits in a range
|
||||
*/
|
||||
countCommits(range?: string): number {
|
||||
return this.#repo.countCommits(range);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get commit history
|
||||
*/
|
||||
log(options?: LogOptions): Commit[] {
|
||||
const commits = this.#repo.log(options);
|
||||
return commits.map((c: any) => new Commit(c));
|
||||
}
|
||||
}
|
||||
|
||||
class Commit {
|
||||
#commit: any;
|
||||
|
||||
constructor(commit: any) {
|
||||
this.#commit = commit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the commit OID (SHA-1 hash)
|
||||
*/
|
||||
get id(): string {
|
||||
return this.#commit.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the full commit message
|
||||
*/
|
||||
get message(): string {
|
||||
return this.#commit.message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first line of the commit message
|
||||
*/
|
||||
get summary(): string {
|
||||
return this.#commit.summary;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the author signature
|
||||
*/
|
||||
get author(): Signature {
|
||||
return this.#commit.author;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the committer signature
|
||||
*/
|
||||
get committer(): Signature {
|
||||
return this.#commit.committer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the commit time as Unix timestamp (seconds since epoch)
|
||||
*/
|
||||
get time(): number {
|
||||
return this.#commit.time;
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
__esModule: true,
|
||||
Repository,
|
||||
Commit,
|
||||
StatusEntry,
|
||||
Status,
|
||||
DeltaType,
|
||||
default: Repository,
|
||||
};
|
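For reference, a hedged usage sketch of the module above. The class and method names come straight from git.ts in this diff; the import form and the repository path are assumptions for illustration.

// Illustrative usage of "bun:git" as defined above (not a test from this diff).
import { Repository, Status } from "bun:git";

const repo = Repository.open(".");        // assumed to throw if "." is not a Git repository
console.log(repo.getCurrentBranch());     // e.g. "main", or null for a detached HEAD

const head = repo.head();
console.log(head.id, head.summary, head.author.name);

// StatusEntry exposes nodegit-style bitflag helpers built on the Status constants.
for (const entry of repo.getStatus({ includeUntracked: true })) {
  const staged = entry.inIndex();         // any INDEX_* bit set
  const dirty = entry.inWorkingTree();    // any WT_* bit set
  const untracked = (entry.status & Status.WT_NEW) !== 0;
  console.log(entry.path, { staged, dirty, untracked });
}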
||||
@@ -1,395 +0,0 @@
|
||||
const { Duplex } = require("node:stream");
|
||||
const upgradeDuplexToTLS = $newZigFunction("socket.zig", "jsUpgradeDuplexToTLS", 2);
|
||||
|
||||
interface NativeHandle {
|
||||
resume(): void;
|
||||
close(): void;
|
||||
end(): void;
|
||||
$write(chunk: Buffer, encoding: string): boolean;
|
||||
alpnProtocol?: string;
|
||||
}
|
||||
|
||||
interface UpgradeContextType {
|
||||
connectionListener: (...args: any[]) => any;
|
||||
server: Http2SecureServer;
|
||||
rawSocket: import("node:net").Socket;
|
||||
nativeHandle: NativeHandle | null;
|
||||
events: [(...args: any[]) => void, ...Function[]] | null;
|
||||
}
|
||||
|
||||
interface Http2SecureServer {
|
||||
key?: Buffer;
|
||||
cert?: Buffer;
|
||||
ca?: Buffer;
|
||||
passphrase?: string;
|
||||
ALPNProtocols?: Buffer;
|
||||
_requestCert?: boolean;
|
||||
_rejectUnauthorized?: boolean;
|
||||
emit(event: string, ...args: any[]): boolean;
|
||||
}
|
||||
|
||||
interface TLSProxySocket {
|
||||
_ctx: UpgradeContextType;
|
||||
_writeCallback: ((err?: Error | null) => void) | null;
|
||||
alpnProtocol: string | null;
|
||||
authorized: boolean;
|
||||
encrypted: boolean;
|
||||
server: Http2SecureServer;
|
||||
_requestCert: boolean;
|
||||
_rejectUnauthorized: boolean;
|
||||
_securePending: boolean;
|
||||
secureConnecting: boolean;
|
||||
_secureEstablished: boolean;
|
||||
authorizationError?: string;
|
||||
push(chunk: Buffer | null): boolean;
|
||||
destroy(err?: Error): this;
|
||||
emit(event: string, ...args: any[]): boolean;
|
||||
resume(): void;
|
||||
readonly destroyed: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Context object holding upgrade-time state for the TLS proxy socket.
|
||||
* Attached as `tlsSocket._ctx` so named functions can reach it via `this._ctx`
|
||||
* (Duplex methods) or via a bound `this` (socket callbacks).
|
||||
*/
|
||||
function UpgradeContext(
|
||||
connectionListener: (...args: any[]) => any,
|
||||
server: Http2SecureServer,
|
||||
rawSocket: import("node:net").Socket,
|
||||
) {
|
||||
this.connectionListener = connectionListener;
|
||||
this.server = server;
|
||||
this.rawSocket = rawSocket;
|
||||
this.nativeHandle = null;
|
||||
this.events = null;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Duplex stream methods — called with `this` = tlsSocket (standard stream API)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// _read: called by stream machinery when the H2 session wants data.
|
||||
// Resume the native TLS handle so it feeds decrypted data via the data callback.
|
||||
// Mirrors net.ts Socket.prototype._read which calls socket.resume().
|
||||
function tlsSocketRead(this: TLSProxySocket) {
|
||||
const h = this._ctx.nativeHandle;
|
||||
if (h) {
|
||||
h.resume();
|
||||
}
|
||||
this._ctx.rawSocket.resume();
|
||||
}
|
||||
|
||||
// _write: called when the H2 session writes outbound frames.
|
||||
// Forward to the native TLS handle for encryption, then back to rawSocket.
|
||||
// Mirrors net.ts Socket.prototype._write which calls socket.$write().
|
||||
function tlsSocketWrite(this: TLSProxySocket, chunk: Buffer, encoding: string, callback: (err?: Error) => void) {
|
||||
const h = this._ctx.nativeHandle;
|
||||
if (!h) {
|
||||
callback(new Error("Socket is closed"));
|
||||
return;
|
||||
}
|
||||
// $write returns true if fully flushed, false if buffered
|
||||
if (h.$write(chunk, encoding)) {
|
||||
callback();
|
||||
} else {
|
||||
// Store callback so drain event can invoke it (backpressure)
|
||||
this._writeCallback = callback;
|
||||
}
|
||||
}
|
||||
|
||||
// _destroy: called when the stream is destroyed (e.g. tlsSocket.destroy(err)).
|
||||
// Cleans up the native TLS handle.
|
||||
// Mirrors net.ts Socket.prototype._destroy.
|
||||
function tlsSocketDestroy(this: TLSProxySocket, err: Error | null, callback: (err?: Error | null) => void) {
|
||||
const h = this._ctx.nativeHandle;
|
||||
if (h) {
|
||||
h.close();
|
||||
this._ctx.nativeHandle = null;
|
||||
}
|
||||
// Must invoke pending write callback with error per Writable stream contract
|
||||
const writeCb = this._writeCallback;
|
||||
if (writeCb) {
|
||||
this._writeCallback = null;
|
||||
writeCb(err ?? new Error("Socket destroyed"));
|
||||
}
|
||||
callback(err);
|
||||
}
|
||||
|
||||
// _final: called when the writable side is ending (all data flushed).
|
||||
// Shuts down the TLS write side gracefully.
|
||||
// Mirrors net.ts Socket.prototype._final.
|
||||
function tlsSocketFinal(this: TLSProxySocket, callback: () => void) {
|
||||
const h = this._ctx.nativeHandle;
|
||||
if (!h) return callback();
|
||||
// Signal end-of-stream to the TLS layer
|
||||
h.end();
|
||||
callback();
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Socket callbacks — called by Zig with `this` = native handle (not useful).
|
||||
// All are bound to tlsSocket so `this` inside each = tlsSocket.
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// open: called when the TLS layer is initialized (before handshake).
|
||||
// No action needed; we wait for the handshake callback.
|
||||
function socketOpen() {}
|
||||
|
||||
// data: called with decrypted plaintext after the TLS layer decrypts incoming data.
|
||||
// Push into tlsSocket so the H2 session's _read() receives these frames.
|
||||
function socketData(this: TLSProxySocket, _socket: NativeHandle, chunk: Buffer) {
|
||||
if (!this.push(chunk)) {
|
||||
this._ctx.rawSocket.pause();
|
||||
}
|
||||
}
|
||||
|
||||
// end: TLS peer signaled end-of-stream; signal EOF to the H2 session.
|
||||
function socketEnd(this: TLSProxySocket) {
|
||||
this.push(null);
|
||||
}
|
||||
|
||||
// drain: raw socket is writable again after being full; propagate backpressure signal.
|
||||
// If _write stored a callback waiting for drain, invoke it now.
|
||||
function socketDrain(this: TLSProxySocket) {
|
||||
const cb = this._writeCallback;
|
||||
if (cb) {
|
||||
this._writeCallback = null;
|
||||
cb();
|
||||
}
|
||||
}
|
||||
|
||||
// close: TLS connection closed; tear down the tlsSocket Duplex.
|
||||
function socketClose(this: TLSProxySocket) {
|
||||
if (!this.destroyed) {
|
||||
this.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
// error: TLS-level error (e.g. certificate verification failure).
|
||||
// In server mode without _requestCert, the server doesn't request a client cert,
|
||||
// so issuer verification errors on the server's own cert are non-fatal.
|
||||
function socketError(this: TLSProxySocket, _socket: NativeHandle, err: NodeJS.ErrnoException) {
|
||||
const ctx = this._ctx;
|
||||
if (!ctx.server._requestCert && err?.code === "UNABLE_TO_GET_ISSUER_CERT") {
|
||||
return;
|
||||
}
|
||||
this.destroy(err);
|
||||
}
|
||||
|
||||
// timeout: socket idle timeout; forward to the Duplex so H2 session can handle it.
|
||||
function socketTimeout(this: TLSProxySocket) {
|
||||
this.emit("timeout");
|
||||
}
|
||||
|
||||
// handshake: TLS handshake completed. This is the critical callback that triggers
|
||||
// H2 session creation.
|
||||
//
|
||||
// Mirrors the handshake logic in net.ts ServerHandlers.handshake:
|
||||
// - Set secure-connection state flags on tlsSocket
|
||||
// - Read alpnProtocol from the native handle (set by ALPN negotiation)
|
||||
// - Handle _requestCert / _rejectUnauthorized for mutual TLS
|
||||
// - Call connectionListener to create the ServerHttp2Session
|
||||
function socketHandshake(
|
||||
this: TLSProxySocket,
|
||||
nativeHandle: NativeHandle,
|
||||
success: boolean,
|
||||
verifyError: NodeJS.ErrnoException | null,
|
||||
) {
|
||||
const tlsSocket = this; // bound
|
||||
const ctx = tlsSocket._ctx;
|
||||
|
||||
if (!success) {
|
||||
const err = verifyError || new Error("TLS handshake failed");
|
||||
ctx.server.emit("tlsClientError", err, tlsSocket);
|
||||
tlsSocket.destroy(err);
|
||||
return;
|
||||
}
|
||||
|
||||
// Mark TLS handshake as complete on the proxy socket
|
||||
tlsSocket._securePending = false;
|
||||
tlsSocket.secureConnecting = false;
|
||||
tlsSocket._secureEstablished = true;
|
||||
|
||||
// Copy the negotiated ALPN protocol (e.g. "h2") from the native TLS handle.
|
||||
// The H2 session checks this to confirm HTTP/2 was negotiated.
|
||||
tlsSocket.alpnProtocol = nativeHandle?.alpnProtocol ?? null;
|
||||
|
||||
// Handle mutual TLS: if the server requested a client cert, check for errors
|
||||
if (tlsSocket._requestCert || tlsSocket._rejectUnauthorized) {
|
||||
if (verifyError) {
|
||||
tlsSocket.authorized = false;
|
||||
tlsSocket.authorizationError = verifyError.code || verifyError.message;
|
||||
ctx.server.emit("tlsClientError", verifyError, tlsSocket);
|
||||
if (tlsSocket._rejectUnauthorized) {
|
||||
tlsSocket.emit("secure", tlsSocket);
|
||||
tlsSocket.destroy(verifyError);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
tlsSocket.authorized = true;
|
||||
}
|
||||
} else {
|
||||
tlsSocket.authorized = true;
|
||||
}
|
||||
|
||||
// Invoke the H2 connectionListener which creates a ServerHttp2Session.
|
||||
// This is the same function passed to Http2SecureServer's constructor
|
||||
// and is what normally fires on the 'secureConnection' event.
|
||||
ctx.connectionListener.$call(ctx.server, tlsSocket);
|
||||
|
||||
// Resume the Duplex so the H2 session can read frames from it.
|
||||
// Mirrors net.ts ServerHandlers.handshake line 438: `self.resume()`.
|
||||
tlsSocket.resume();
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Close-cleanup handler
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// onTlsClose: when the TLS socket closes (e.g. H2 session destroyed), clean up
|
||||
// the raw socket listeners to prevent memory leaks and stale callback references.
|
||||
// EventEmitter calls 'close' handlers with `this` = emitter (tlsSocket).
|
||||
function onTlsClose(this: TLSProxySocket) {
|
||||
const ctx = this._ctx;
|
||||
const raw = ctx.rawSocket;
|
||||
const ev = ctx.events;
|
||||
if (!ev) return;
|
||||
raw.removeListener("data", ev[0]);
|
||||
raw.removeListener("end", ev[1]);
|
||||
raw.removeListener("drain", ev[2]);
|
||||
raw.removeListener("close", ev[3]);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Module-scope noop (replaces anonymous () => {} for the error suppression)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// no-op handler used to suppress unhandled error events until
|
||||
// the H2 session attaches its own error handler.
|
||||
function noop() {}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Main upgrade function
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Upgrades a raw TCP socket to TLS and initiates an H2 session on it.
|
||||
//
|
||||
// When a net.Server forwards an accepted TCP connection to an Http2SecureServer
|
||||
// via `h2Server.emit('connection', socket)`, the socket has not been TLS-upgraded.
|
||||
// Node.js Http2SecureServer expects to receive this and perform the upgrade itself.
|
||||
//
|
||||
// This mirrors the TLS server handshake pattern from net.ts ServerHandlers, but
|
||||
// targets the H2 connectionListener instead of a generic secureConnection event.
|
||||
//
|
||||
// Data flow after upgrade:
|
||||
// rawSocket (TCP) → upgradeDuplexToTLS (Zig TLS layer) → socket callbacks
|
||||
// → tlsSocket.push() → H2 session reads
|
||||
// H2 session writes → tlsSocket._write() → handle.$write() → Zig TLS layer → rawSocket
|
||||
//
|
||||
// CRITICAL: We do NOT set tlsSocket._handle to the native TLS handle.
|
||||
// If we did, the H2FrameParser constructor would detect it as a JSTLSSocket
|
||||
// and call attachNativeCallback(), which intercepts all decrypted data at the
|
||||
// Zig level, completely bypassing our JS data callback and Duplex.push() path.
|
||||
// Instead, we store the handle in _ctx.nativeHandle so _read/_write/_destroy
|
||||
// can use it, while the H2 session sees _handle as null and uses the JS-level
|
||||
// socket.on("data") → Duplex → parser.read() path for incoming frames.
|
||||
function upgradeRawSocketToH2(
|
||||
connectionListener: (...args: any[]) => any,
|
||||
server: Http2SecureServer,
|
||||
rawSocket: import("node:net").Socket,
|
||||
): boolean {
|
||||
// Create a Duplex stream that acts as the TLS "socket" from the H2 session's perspective.
|
||||
const tlsSocket = new Duplex() as unknown as TLSProxySocket;
|
||||
tlsSocket._ctx = new UpgradeContext(connectionListener, server, rawSocket);
|
||||
|
||||
// Duplex stream methods — `this` is tlsSocket, no bind needed
|
||||
tlsSocket._read = tlsSocketRead;
|
||||
tlsSocket._write = tlsSocketWrite;
|
||||
tlsSocket._destroy = tlsSocketDestroy;
|
||||
tlsSocket._final = tlsSocketFinal;
|
||||
|
||||
// Suppress unhandled error events until the H2 session attaches its own error handler
|
||||
tlsSocket.on("error", noop);
|
||||
|
||||
// Set TLS-like properties that connectionListener and the H2 session expect.
|
||||
// These are set on the Duplex because we cannot use a real TLSSocket here —
|
||||
// its internal state machine would conflict with upgradeDuplexToTLS.
|
||||
tlsSocket.alpnProtocol = null;
|
||||
tlsSocket.authorized = false;
|
||||
tlsSocket.encrypted = true;
|
||||
tlsSocket.server = server;
|
||||
|
||||
// Only enforce client cert verification if the server explicitly requests it.
|
||||
// tls.Server defaults _rejectUnauthorized to true, but without _requestCert
|
||||
// the server doesn't actually ask for a client cert, so verification errors
|
||||
// (e.g. UNABLE_TO_GET_ISSUER_CERT for the server's own self-signed cert) are
|
||||
// spurious and must be ignored.
|
||||
tlsSocket._requestCert = server._requestCert || false;
|
||||
tlsSocket._rejectUnauthorized = server._requestCert ? server._rejectUnauthorized : false;
|
||||
|
||||
// socket: callbacks — bind to tlsSocket since Zig calls them with native handle as `this`
|
||||
let handle: NativeHandle, events: UpgradeContextType["events"];
|
||||
try {
|
||||
// upgradeDuplexToTLS wraps rawSocket with a TLS layer in server mode (isServer: true).
|
||||
// The Zig side will:
|
||||
// 1. Read encrypted data from rawSocket via events[0..3]
|
||||
// 2. Decrypt it through the TLS engine (with ALPN negotiation for "h2")
|
||||
// 3. Call our socket callbacks below with the decrypted plaintext
|
||||
//
|
||||
// ALPNProtocols: server.ALPNProtocols is a Buffer in wire format (e.g. <Buffer 02 68 32>
|
||||
// for ["h2"]). The Zig SSLConfig expects an ArrayBuffer, so we slice the underlying buffer.
|
||||
[handle, events] = upgradeDuplexToTLS(rawSocket, {
|
||||
isServer: true,
|
||||
tls: {
|
||||
key: server.key,
|
||||
cert: server.cert,
|
||||
ca: server.ca,
|
||||
passphrase: server.passphrase,
|
||||
ALPNProtocols: server.ALPNProtocols
|
||||
? server.ALPNProtocols.buffer.slice(
|
||||
server.ALPNProtocols.byteOffset,
|
||||
server.ALPNProtocols.byteOffset + server.ALPNProtocols.byteLength,
|
||||
)
|
||||
: null,
|
||||
},
|
||||
socket: {
|
||||
open: socketOpen,
|
||||
data: socketData.bind(tlsSocket),
|
||||
end: socketEnd.bind(tlsSocket),
|
||||
drain: socketDrain.bind(tlsSocket),
|
||||
close: socketClose.bind(tlsSocket),
|
||||
error: socketError.bind(tlsSocket),
|
||||
timeout: socketTimeout.bind(tlsSocket),
|
||||
handshake: socketHandshake.bind(tlsSocket),
|
||||
},
|
||||
data: {},
|
||||
});
|
||||
} catch (e) {
|
||||
rawSocket.destroy(e as Error);
|
||||
tlsSocket.destroy(e as Error);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Store handle in _ctx (NOT on tlsSocket._handle).
|
||||
// This prevents H2FrameParser from attaching as native callback which would
|
||||
// intercept data at the Zig level and bypass our Duplex push path.
|
||||
tlsSocket._ctx.nativeHandle = handle;
|
||||
tlsSocket._ctx.events = events;
|
||||
|
||||
// Wire up the raw TCP socket to feed encrypted data into the TLS layer.
|
||||
// events[0..3] are native event handlers returned by upgradeDuplexToTLS that
|
||||
// the Zig TLS engine expects to receive data/end/drain/close through.
|
||||
rawSocket.on("data", events[0]);
|
||||
rawSocket.on("end", events[1]);
|
||||
rawSocket.on("drain", events[2]);
|
||||
rawSocket.on("close", events[3]);
|
||||
|
||||
// When the TLS socket closes (e.g. H2 session destroyed), clean up the raw socket
|
||||
// listeners to prevent memory leaks and stale callback references.
|
||||
// EventEmitter calls 'close' handlers with `this` = emitter (tlsSocket).
|
||||
tlsSocket.once("close", onTlsClose);
|
||||
return true;
|
||||
}
|
||||
|
||||
export default { upgradeRawSocketToH2 };
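A hedged sketch of the scenario this module exists for: a plain net.Server accepts TCP connections and forwards them to an Http2SecureServer, whose emit("connection", ...) override (shown further below in http2.ts) routes non-TLS sockets through upgradeRawSocketToH2. File names and options here are assumptions for illustration.

import net from "node:net";
import { createSecureServer } from "node:http2";
import { readFileSync } from "node:fs";

const h2 = createSecureServer({
  key: readFileSync("server-key.pem"),
  cert: readFileSync("server-cert.pem"),
});
h2.on("stream", stream => {
  stream.respond({ ":status": 200 });
  stream.end("ok");
});

// A plain TCP listener hands its raw sockets to the HTTP/2 server. The socket is
// not a TLSSocket, so the emit("connection") override invokes upgradeRawSocketToH2,
// which performs the TLS handshake and then calls the H2 connection listener.
const tcp = net.createServer(socket => {
  h2.emit("connection", socket);
});
tcp.listen(8443);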
|
||||
@@ -73,7 +73,6 @@ const H2FrameParser = $zig("h2_frame_parser.zig", "H2FrameParserConstructor");
|
||||
const assertSettings = $newZigFunction("h2_frame_parser.zig", "jsAssertSettings", 1);
|
||||
const getPackedSettings = $newZigFunction("h2_frame_parser.zig", "jsGetPackedSettings", 1);
|
||||
const getUnpackedSettings = $newZigFunction("h2_frame_parser.zig", "jsGetUnpackedSettings", 1);
|
||||
const { upgradeRawSocketToH2 } = require("node:_http2_upgrade");
|
||||
|
||||
const sensitiveHeaders = Symbol.for("nodejs.http2.sensitiveHeaders");
|
||||
const bunHTTP2Native = Symbol.for("::bunhttp2native::");
|
||||
@@ -3882,7 +3881,6 @@ Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function (err, even
|
||||
function onErrorSecureServerSession(err, socket) {
|
||||
if (!this.emit("clientError", err, socket)) socket.destroy(err);
|
||||
}
|
||||
|
||||
function emitFrameErrorEventNT(stream, frameType, errorCode) {
|
||||
stream.emit("frameError", frameType, errorCode);
|
||||
}
|
||||
@@ -3920,15 +3918,6 @@ class Http2SecureServer extends tls.Server {
|
||||
}
|
||||
this.on("tlsClientError", onErrorSecureServerSession);
|
||||
}
|
||||
emit(event: string, ...args: any[]) {
|
||||
if (event === "connection") {
|
||||
const socket = args[0];
|
||||
if (socket && !(socket instanceof TLSSocket)) {
|
||||
return upgradeRawSocketToH2(connectionListener, this, socket);
|
||||
}
|
||||
}
|
||||
return super.emit(event, ...args);
|
||||
}
|
||||
setTimeout(ms, callback) {
|
||||
this.timeout = ms;
|
||||
if (typeof callback === "function") {
|
||||
|
||||
@@ -490,7 +490,7 @@ pub const HtmlRenderer = struct {
|
||||
const needle = "&<>\"";
|
||||
|
||||
while (true) {
|
||||
const next = bun.strings.indexOfAny(txt[i..], needle) orelse {
|
||||
const next = std.mem.indexOfAny(u8, txt[i..], needle) orelse {
|
||||
self.write(txt[i..]);
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -1,7 +1 @@
|
||||
Bun.hash.wyhash("asdf", 1234n);
|
||||
|
||||
// https://github.com/oven-sh/bun/issues/26043
|
||||
// Bun.hash.crc32 accepts optional seed parameter for incremental CRC32 computation
|
||||
let crc = 0;
|
||||
crc = Bun.hash.crc32(new Uint8Array([1, 2, 3]), crc);
|
||||
crc = Bun.hash.crc32(new Uint8Array([4, 5, 6]), crc);
|
||||
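If the seed argument carries CRC state across chunks in the usual zlib style (an assumption, not verified here), the chunked calls above should produce the same value as hashing the concatenated bytes in one call:

// Assumed equivalence for illustration:
const oneShot = Bun.hash.crc32(new Uint8Array([1, 2, 3, 4, 5, 6]));
// oneShot === crc  (the incrementally computed value above)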
|
||||
test/js/bun/git/repository.test.ts (new file, 1107 lines; diff not shown because it is too large)
@@ -1,604 +0,0 @@
import { describe, expect, test } from "bun:test";
import {
  camelCase,
  capitalCase,
  constantCase,
  dotCase,
  kebabCase,
  noCase,
  pascalCase,
  pathCase,
  sentenceCase,
  snakeCase,
  trainCase,
} from "change-case";

type CaseFn = (input: string) => string;

const bunFns: Record<string, CaseFn> = {
  camelCase: Bun.camelCase,
  capitalCase: Bun.capitalCase,
  constantCase: Bun.constantCase,
  dotCase: Bun.dotCase,
  kebabCase: Bun.kebabCase,
  noCase: Bun.noCase,
  pascalCase: Bun.pascalCase,
  pathCase: Bun.pathCase,
  sentenceCase: Bun.sentenceCase,
  snakeCase: Bun.snakeCase,
  trainCase: Bun.trainCase,
};

const changeCaseFns: Record<string, CaseFn> = {
  camelCase,
  capitalCase,
  constantCase,
  dotCase,
  kebabCase,
  noCase,
  pascalCase,
  pathCase,
  sentenceCase,
  snakeCase,
  trainCase,
};

// Comprehensive input set covering many patterns
const testInputs = [
  // Basic words
  "test",
  "foo",
  "a",
  "",

  // Multi-word with various separators
  "test string",
  "test_string",
  "test-string",
  "test.string",
  "test/string",
  "test\tstring",

  // Cased inputs
  "Test String",
  "TEST STRING",
  "TestString",
  "testString",
  "TEST_STRING",

  // Acronyms and consecutive uppercase
  "XMLParser",
  "getHTTPSURL",
  "parseJSON",
  "simpleXML",
  "PDFLoader",
  "I18N",
  "ABC",
  "ABCdef",
  "ABCDef",
  "HTMLElement",
  "innerHTML",
  "XMLHttpRequest",
  "getURLParams",
  "isHTTPS",
  "CSSStyleSheet",
  "IOError",
  "UIKit",

  // Numbers
  "version 1.2.10",
  "TestV2",
  "test123",
  "123test",
  "test 123 value",
  "1st place",
  "v2beta1",
  "ES6Module",
  "utf8Decode",
  "base64Encode",
  "h1Element",
  "int32Array",
  "123",
  "123 456",
  "a1b2c3",
  "test0",

  // Multiple separators / weird spacing
  "foo___bar",
  "foo---bar",
  "foo...bar",
  "foo bar",
  " leading spaces ",
  "__private",
  "--dashed--",
  "..dotted..",
  " ",
  "\t\ttabs\t\t",
  "foo_-_bar",
  "foo.-bar",

  // All uppercase
  "FOO_BAR_BAZ",
  "ALLCAPS",
  "FOO BAR",
  "FOO-BAR",
  "FOO.BAR",

  // Mixed case
  "fooBarBaz",
  "FooBarBaz",
  "Foo Bar",
  "MiXeD CaSe",
  "already camelCase",
  "already PascalCase",
  "already_snake_case",
  "already-kebab-case",
  "Already Capital Case",
  "ALREADY_CONSTANT_CASE",

  // Pre-formatted cases
  "Train-Case-Input",
  "dot.case.input",
  "path/case/input",
  "Sentence case input",
  "no case input",

  // Single characters
  "A",
  "z",
  "Z",
  "0",

  // Real-world identifiers
  "backgroundColor",
  "border-top-color",
  "MAX_RETRY_COUNT",
  "Content-Type",
  "X-Forwarded-For",
  "user_id",
  "getUserById",
  "class_name",
  "className",
  "is_active",
  "isActive",
  "created_at",
  "createdAt",
  "HTTPSConnection",
  "myXMLParser",
  "getDBConnection",
  "setHTTPSEnabled",
  "enableSSL",
  "useGPU",
  "readCSV",
  "parseHTML",
  "toJSON",
  "fromURL",
  "isNaN",
  "toString",
  "valueOf",

  // Column name style inputs (SQL)
  "first_name",
  "last_name",
  "email_address",
  "phone_number",
  "order_total",
  "created_at",
  "updated_at",
  "is_deleted",

  // Hyphenated compound words
  "well-known",
  "read-only",
  "built-in",
  "self-contained",

  // Strings with only separators
  "---",
  "___",
  "...",
  "///",
  "-_.-_.",

  // Unicode (basic)
  "café latte",
  "naïve résumé",
  "hello 世界",

  // Long strings
  "this is a much longer test string with many words to convert",
  "thisIsAMuchLongerTestStringWithManyWordsToConvert",
  "THIS_IS_A_MUCH_LONGER_TEST_STRING_WITH_MANY_WORDS_TO_CONVERT",
];

const allCaseNames = Object.keys(bunFns);

describe("case-change", () => {
  // Main compatibility matrix: every function x every input
  for (const caseName of allCaseNames) {
    const bunFn = bunFns[caseName];
    const changeCaseFn = changeCaseFns[caseName];

    describe(caseName, () => {
      for (const input of testInputs) {
        const expected = changeCaseFn(input);
        test(`${JSON.stringify(input)} => ${JSON.stringify(expected)}`, () => {
          expect(bunFn(input)).toBe(expected);
        });
      }
    });
  }

  // Cross-conversion round-trips: convert from A to B, compare both implementations
  describe("cross-conversion round-trips", () => {
    const conversions = [
      "camelCase",
      "pascalCase",
      "snakeCase",
      "kebabCase",
      "constantCase",
      "noCase",
      "dotCase",
    ] as const;
    const roundTripInputs = [
      "hello world",
      "fooBarBaz",
      "FOO_BAR",
      "XMLParser",
      "getHTTPSURL",
      "test_string",
      "Test String",
      "already-kebab",
      "version 1.2.10",
    ];

    for (const input of roundTripInputs) {
      for (const from of conversions) {
        for (const to of conversions) {
          const intermediate = changeCaseFns[from](input);
          const expected = changeCaseFns[to](intermediate);
          test(`${from}(${JSON.stringify(input)}) => ${to}`, () => {
            const bunIntermediate = bunFns[from](input);
            expect(bunFns[to](bunIntermediate)).toBe(expected);
          });
        }
      }
    }
  });

  // Double-conversion stability: converting the output again should be idempotent
  describe("idempotency", () => {
    const idempotentInputs = ["hello world", "fooBarBaz", "FOO_BAR_BAZ", "XMLParser", "test 123", "café latte"];

    for (const caseName of allCaseNames) {
      const bunFn = bunFns[caseName];
      const changeCaseFn = changeCaseFns[caseName];

      for (const input of idempotentInputs) {
        test(`${caseName}(${caseName}(${JSON.stringify(input)})) is idempotent`, () => {
          const once = bunFn(input);
          const twice = bunFn(once);
          const expectedOnce = changeCaseFn(input);
          const expectedTwice = changeCaseFn(expectedOnce);
          expect(once).toBe(expectedOnce);
          expect(twice).toBe(expectedTwice);
        });
      }
    }
  });

  // Specific per-function expected values (hardcoded, not generated)
  describe("specific expected values", () => {
    test("camelCase", () => {
      expect(Bun.camelCase("foo bar")).toBe("fooBar");
      expect(Bun.camelCase("foo-bar")).toBe("fooBar");
      expect(Bun.camelCase("foo_bar")).toBe("fooBar");
      expect(Bun.camelCase("FOO_BAR")).toBe("fooBar");
      expect(Bun.camelCase("FooBar")).toBe("fooBar");
      expect(Bun.camelCase("fooBar")).toBe("fooBar");
      expect(Bun.camelCase("")).toBe("");
      expect(Bun.camelCase("foo")).toBe("foo");
      expect(Bun.camelCase("A")).toBe("a");
    });

    test("pascalCase", () => {
      expect(Bun.pascalCase("foo bar")).toBe("FooBar");
      expect(Bun.pascalCase("foo-bar")).toBe("FooBar");
      expect(Bun.pascalCase("foo_bar")).toBe("FooBar");
      expect(Bun.pascalCase("FOO_BAR")).toBe("FooBar");
      expect(Bun.pascalCase("fooBar")).toBe("FooBar");
      expect(Bun.pascalCase("")).toBe("");
      expect(Bun.pascalCase("foo")).toBe("Foo");
    });

    test("snakeCase", () => {
      expect(Bun.snakeCase("foo bar")).toBe("foo_bar");
      expect(Bun.snakeCase("fooBar")).toBe("foo_bar");
      expect(Bun.snakeCase("FooBar")).toBe("foo_bar");
      expect(Bun.snakeCase("FOO_BAR")).toBe("foo_bar");
      expect(Bun.snakeCase("foo-bar")).toBe("foo_bar");
      expect(Bun.snakeCase("")).toBe("");
    });

    test("kebabCase", () => {
      expect(Bun.kebabCase("foo bar")).toBe("foo-bar");
      expect(Bun.kebabCase("fooBar")).toBe("foo-bar");
      expect(Bun.kebabCase("FooBar")).toBe("foo-bar");
      expect(Bun.kebabCase("FOO_BAR")).toBe("foo-bar");
      expect(Bun.kebabCase("foo_bar")).toBe("foo-bar");
      expect(Bun.kebabCase("")).toBe("");
    });

    test("constantCase", () => {
      expect(Bun.constantCase("foo bar")).toBe("FOO_BAR");
      expect(Bun.constantCase("fooBar")).toBe("FOO_BAR");
      expect(Bun.constantCase("FooBar")).toBe("FOO_BAR");
      expect(Bun.constantCase("foo-bar")).toBe("FOO_BAR");
      expect(Bun.constantCase("foo_bar")).toBe("FOO_BAR");
      expect(Bun.constantCase("")).toBe("");
    });

    test("dotCase", () => {
      expect(Bun.dotCase("foo bar")).toBe("foo.bar");
      expect(Bun.dotCase("fooBar")).toBe("foo.bar");
      expect(Bun.dotCase("FOO_BAR")).toBe("foo.bar");
      expect(Bun.dotCase("")).toBe("");
    });

    test("capitalCase", () => {
      expect(Bun.capitalCase("foo bar")).toBe("Foo Bar");
      expect(Bun.capitalCase("fooBar")).toBe("Foo Bar");
      expect(Bun.capitalCase("FOO_BAR")).toBe("Foo Bar");
      expect(Bun.capitalCase("")).toBe("");
    });

    test("trainCase", () => {
      expect(Bun.trainCase("foo bar")).toBe("Foo-Bar");
      expect(Bun.trainCase("fooBar")).toBe("Foo-Bar");
      expect(Bun.trainCase("FOO_BAR")).toBe("Foo-Bar");
      expect(Bun.trainCase("")).toBe("");
    });

    test("pathCase", () => {
      expect(Bun.pathCase("foo bar")).toBe("foo/bar");
      expect(Bun.pathCase("fooBar")).toBe("foo/bar");
      expect(Bun.pathCase("FOO_BAR")).toBe("foo/bar");
      expect(Bun.pathCase("")).toBe("");
    });

    test("sentenceCase", () => {
      expect(Bun.sentenceCase("foo bar")).toBe("Foo bar");
      expect(Bun.sentenceCase("fooBar")).toBe("Foo bar");
      expect(Bun.sentenceCase("FOO_BAR")).toBe("Foo bar");
      expect(Bun.sentenceCase("")).toBe("");
    });

    test("noCase", () => {
      expect(Bun.noCase("foo bar")).toBe("foo bar");
      expect(Bun.noCase("fooBar")).toBe("foo bar");
      expect(Bun.noCase("FOO_BAR")).toBe("foo bar");
      expect(Bun.noCase("FooBar")).toBe("foo bar");
      expect(Bun.noCase("")).toBe("");
    });
  });

  // Edge cases
  describe("edge cases", () => {
    test("empty string returns empty for all functions", () => {
      for (const caseName of allCaseNames) {
        expect(bunFns[caseName]("")).toBe("");
      }
    });

    test("single character", () => {
      for (const ch of ["a", "A", "z", "Z", "0", "9"]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](ch)).toBe(changeCaseFns[caseName](ch));
        }
      }
    });

    test("all separators produce empty for all functions", () => {
      for (const sep of ["---", "___", "...", " ", "\t\t", "-_.-_."]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](sep)).toBe(changeCaseFns[caseName](sep));
        }
      }
    });

    test("numbers only", () => {
      for (const input of ["123", "0", "999", "123 456", "1.2.3"]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      }
    });

    test("mixed numbers and letters", () => {
      for (const input of [
        "test123",
        "123test",
        "test 123 value",
        "1st place",
        "v2beta1",
        "a1b2c3",
        "ES6Module",
        "utf8Decode",
        "base64Encode",
        "h1Element",
        "int32Array",
      ]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      }
    });

    test("consecutive separators are collapsed", () => {
      for (const input of ["foo___bar", "foo---bar", "foo...bar", "foo bar"]) {
        expect(Bun.camelCase(input)).toBe(camelCase(input));
        expect(Bun.snakeCase(input)).toBe(snakeCase(input));
      }
    });

    test("leading and trailing separators are stripped", () => {
      for (const input of [" foo ", "__bar__", "--baz--", "..qux.."]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      }
    });

    test("unicode strings", () => {
      for (const input of ["café latte", "naïve résumé", "hello 世界"]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      }
    });

    test("acronym splitting", () => {
      // These specifically test the upper->upper+lower boundary rule
      for (const input of [
        "XMLParser",
        "HTMLElement",
        "innerHTML",
        "XMLHttpRequest",
        "getURLParams",
        "isHTTPS",
        "CSSStyleSheet",
        "IOError",
        "UIKit",
        "HTTPSConnection",
        "myXMLParser",
        "getDBConnection",
        "setHTTPSEnabled",
        "ABCDef",
        "ABCdef",
      ]) {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      }
    });

    test("digit-prefix underscore in camelCase/pascalCase", () => {
      // change-case inserts _ before digit-starting words (index > 0) in camel/pascal
      const input = "version 1.2.10";
      expect(Bun.camelCase(input)).toBe(camelCase(input));
      expect(Bun.pascalCase(input)).toBe(pascalCase(input));
      // snake/kebab/etc should NOT have the _ prefix
      expect(Bun.snakeCase(input)).toBe(snakeCase(input));
      expect(Bun.kebabCase(input)).toBe(kebabCase(input));
    });

    test("long strings", () => {
      const long =
        "this is a much longer test string with many words to convert and it keeps going and going and going";
      for (const caseName of allCaseNames) {
        expect(bunFns[caseName](long)).toBe(changeCaseFns[caseName](long));
      }
    });

    test("repeated single word", () => {
      expect(Bun.camelCase("foo")).toBe("foo");
      expect(Bun.pascalCase("foo")).toBe("Foo");
      expect(Bun.snakeCase("foo")).toBe("foo");
      expect(Bun.kebabCase("foo")).toBe("foo");
      expect(Bun.constantCase("foo")).toBe("FOO");
    });

    test("single uppercase word", () => {
      expect(Bun.camelCase("FOO")).toBe(camelCase("FOO"));
      expect(Bun.pascalCase("FOO")).toBe(pascalCase("FOO"));
      expect(Bun.snakeCase("FOO")).toBe(snakeCase("FOO"));
    });
  });

  // Error handling
  describe("error handling", () => {
    for (const caseName of allCaseNames) {
      const fn = bunFns[caseName];

      test(`${caseName}() with no arguments throws`, () => {
        // @ts-expect-error
        expect(() => fn()).toThrow();
      });

      test(`${caseName}(123) with number throws`, () => {
        // @ts-expect-error
        expect(() => fn(123)).toThrow();
      });

      test(`${caseName}(null) throws`, () => {
        // @ts-expect-error
        expect(() => fn(null)).toThrow();
      });

      test(`${caseName}(undefined) throws`, () => {
        // @ts-expect-error
        expect(() => fn(undefined)).toThrow();
      });

      test(`${caseName}({}) with object throws`, () => {
        // @ts-expect-error
        expect(() => fn({})).toThrow();
      });

      test(`${caseName}([]) with array throws`, () => {
        // @ts-expect-error
        expect(() => fn([])).toThrow();
      });

      test(`${caseName}(true) with boolean throws`, () => {
        // @ts-expect-error
        expect(() => fn(true)).toThrow();
      });
    }
  });

  // Ensure .length property is 1
  describe("function.length", () => {
    for (const caseName of allCaseNames) {
      test(`Bun.${caseName}.length === 1`, () => {
        expect(bunFns[caseName].length).toBe(1);
      });
    }
  });

  // Stress test with generated inputs
  describe("generated inputs", () => {
    // Words joined with various separators
    const words = ["foo", "bar", "baz", "qux"];
    const separators = [" ", "_", "-", ".", "/", " ", "__", "--"];

    for (const sep of separators) {
      const input = words.join(sep);
      test(`words joined by ${JSON.stringify(sep)}: ${JSON.stringify(input)}`, () => {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      });
    }

    // Various camelCase-style inputs
    const camelInputs = [
      "oneTwoThree",
      "OneTwoThree",
      "oneTWOThree",
      "ONETwoThree",
      "oneTwo3",
      "one2Three",
      "one23",
      "oneABCTwo",
    ];

    for (const input of camelInputs) {
      test(`camelCase-style: ${JSON.stringify(input)}`, () => {
        for (const caseName of allCaseNames) {
          expect(bunFns[caseName](input)).toBe(changeCaseFns[caseName](input));
        }
      });
    }
  });
});
@@ -1,161 +0,0 @@
/**
 * All tests in this file should also run in Node.js.
 *
 * Do not add any tests that only run in Bun.
 */

import { describe, test } from "node:test";
import assert from "node:assert";
import { Agent, createServer, request as httpRequest } from "node:http";
import type { AddressInfo } from "node:net";

// Helper to make a request and get the response.
// Uses a shared agent so that all requests go through the same TCP connection,
// which is critical for actually testing the keep-alive / proxy-URL bug.
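// Background for the requests below (plain HTTP semantics, not specific to this test): passing a
// full URL as `path` makes node:http send the absolute-form request target that forward proxies
// use, e.g. `GET http://example.com/test1 HTTP/1.1`, instead of the usual origin form
// `GET /test1 HTTP/1.1`. The server is expected to keep treating these as ordinary requests on a
// reusable keep-alive connection rather than switching the socket into tunnel mode.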
function makeRequest(
  port: number,
  path: string,
  agent: Agent,
): Promise<{ statusCode: number; body: string; url: string }> {
  return new Promise((resolve, reject) => {
    const req = httpRequest({ host: "127.0.0.1", port, path, method: "GET", agent }, res => {
      let body = "";
      res.on("data", chunk => {
        body += chunk;
      });
      res.on("end", () => {
        resolve({ statusCode: res.statusCode!, body, url: path });
      });
    });
    req.on("error", reject);
    req.end();
  });
}

function listenOnRandomPort(server: ReturnType<typeof createServer>): Promise<number> {
  return new Promise((resolve) => {
    server.listen(0, "127.0.0.1", () => {
      const addr = server.address() as AddressInfo;
      resolve(addr.port);
    });
  });
}

describe("HTTP server with proxy-style absolute URLs", () => {
  test("sequential GET requests with absolute URL paths don't hang", async () => {
    const agent = new Agent({ keepAlive: true, maxSockets: 1 });
    const server = createServer((req, res) => {
      res.writeHead(200, { "Content-Type": "text/plain" });
      res.end(req.url);
    });

    const port = await listenOnRandomPort(server);

    try {
      // Make 3 sequential requests with proxy-style absolute URLs
      // Before the fix, request 2 would hang because the parser entered tunnel mode
      const r1 = await makeRequest(port, "http://example.com/test1", agent);
      assert.strictEqual(r1.statusCode, 200);
      assert.ok(r1.body.includes("example.com"), `Expected body to contain "example.com", got: ${r1.body}`);
      assert.ok(r1.body.includes("/test1"), `Expected body to contain "/test1", got: ${r1.body}`);

      const r2 = await makeRequest(port, "http://example.com/test2", agent);
      assert.strictEqual(r2.statusCode, 200);
      assert.ok(r2.body.includes("example.com"), `Expected body to contain "example.com", got: ${r2.body}`);
      assert.ok(r2.body.includes("/test2"), `Expected body to contain "/test2", got: ${r2.body}`);

      const r3 = await makeRequest(port, "http://other.com/test3", agent);
      assert.strictEqual(r3.statusCode, 200);
      assert.ok(r3.body.includes("other.com"), `Expected body to contain "other.com", got: ${r3.body}`);
      assert.ok(r3.body.includes("/test3"), `Expected body to contain "/test3", got: ${r3.body}`);
    } finally {
      agent.destroy();
      server.close();
    }
  });

  test("sequential POST requests with absolute URL paths don't hang", async () => {
    const agent = new Agent({ keepAlive: true, maxSockets: 1 });
    const server = createServer((req, res) => {
      let body = "";
      req.on("data", chunk => {
        body += chunk;
      });
      req.on("end", () => {
        res.writeHead(200, { "Content-Type": "text/plain" });
        res.end(`${req.method} ${req.url} body=${body}`);
      });
    });

    const port = await listenOnRandomPort(server);

    try {
      for (let i = 1; i <= 3; i++) {
        const result = await new Promise<{ statusCode: number; body: string }>((resolve, reject) => {
          const req = httpRequest(
            {
              host: "127.0.0.1",
              port,
              path: `http://example.com/post${i}`,
              method: "POST",
              headers: { "Content-Type": "text/plain" },
              agent,
            },
            res => {
              let body = "";
              res.on("data", chunk => {
                body += chunk;
              });
              res.on("end", () => {
                resolve({ statusCode: res.statusCode!, body });
              });
            },
          );
          req.on("error", reject);
          req.write(`data${i}`);
          req.end();
        });
        assert.strictEqual(result.statusCode, 200);
        assert.ok(result.body.includes(`/post${i}`), `Expected body to contain "/post${i}", got: ${result.body}`);
        assert.ok(result.body.includes(`body=data${i}`), `Expected body to contain "body=data${i}", got: ${result.body}`);
      }
    } finally {
      agent.destroy();
      server.close();
    }
  });

  test("mixed normal and proxy-style URLs work sequentially", async () => {
    const agent = new Agent({ keepAlive: true, maxSockets: 1 });
    const server = createServer((req, res) => {
      res.writeHead(200, { "Content-Type": "text/plain" });
      res.end(req.url);
    });

    const port = await listenOnRandomPort(server);

    try {
      // Mix of normal and proxy-style URLs
      const r1 = await makeRequest(port, "/normal1", agent);
      assert.strictEqual(r1.statusCode, 200);
      assert.ok(r1.body.includes("/normal1"), `Expected body to contain "/normal1", got: ${r1.body}`);

      const r2 = await makeRequest(port, "http://example.com/proxy1", agent);
      assert.strictEqual(r2.statusCode, 200);
      assert.ok(r2.body.includes("example.com"), `Expected body to contain "example.com", got: ${r2.body}`);
      assert.ok(r2.body.includes("/proxy1"), `Expected body to contain "/proxy1", got: ${r2.body}`);

      const r3 = await makeRequest(port, "/normal2", agent);
      assert.strictEqual(r3.statusCode, 200);
      assert.ok(r3.body.includes("/normal2"), `Expected body to contain "/normal2", got: ${r3.body}`);

      const r4 = await makeRequest(port, "http://other.com/proxy2", agent);
      assert.strictEqual(r4.statusCode, 200);
      assert.ok(r4.body.includes("other.com"), `Expected body to contain "other.com", got: ${r4.body}`);
      assert.ok(r4.body.includes("/proxy2"), `Expected body to contain "/proxy2", got: ${r4.body}`);
    } finally {
      agent.destroy();
      server.close();
    }
  });
});
@@ -1,26 +0,0 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, nodeExe } from "harness";
import { join } from "node:path";

describe("HTTP server with proxy-style absolute URLs", () => {
  test("tests should run on node.js", async () => {
    await using process = Bun.spawn({
      cmd: [nodeExe(), "--test", join(import.meta.dir, "node-http-proxy-url.node.mts")],
      stdout: "inherit",
      stderr: "inherit",
      stdin: "ignore",
      env: bunEnv,
    });
    expect(await process.exited).toBe(0);
  });
  test("tests should run on bun", async () => {
    await using process = Bun.spawn({
      cmd: [bunExe(), "test", join(import.meta.dir, "node-http-proxy-url.node.mts")],
      stdout: "inherit",
      stderr: "inherit",
      stdin: "ignore",
      env: bunEnv,
    });
    expect(await process.exited).toBe(0);
  });
});
@@ -1,428 +0,0 @@
/**
 * Tests for the net.Server → Http2SecureServer upgrade path
 * (upgradeRawSocketToH2 in _http2_upgrade.ts).
 *
 * This pattern is used by http2-wrapper, crawlee, and other libraries that
 * accept raw TCP connections and upgrade them to HTTP/2 via
 * `h2Server.emit('connection', rawSocket)`.
 *
 * Works with both:
 *   bun bd test test/js/node/http2/node-http2-upgrade.test.ts
 *   node --experimental-strip-types --test test/js/node/http2/node-http2-upgrade.test.ts
 */
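// The pattern under test, reduced to its core (a sketch only; `createUpgradeServer` below adds
// the TLS fixtures, session hooks, and port plumbing used by the actual tests):
//
//   const h2Server = http2.createSecureServer({ ...tls, ALPNProtocols: ["h2"] }, handler);
//   const netServer = net.createServer(socket => h2Server.emit("connection", socket));
//   netServer.listen(0);
//
// Each raw TCP socket accepted by the net.Server is handed to the Http2SecureServer, which then
// runs the TLS + ALPN + HTTP/2 handshake on it as if it had accepted the connection itself.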
import assert from "node:assert";
import fs from "node:fs";
import http2 from "node:http2";
import net from "node:net";
import path from "node:path";
import { afterEach, describe, test } from "node:test";
import { fileURLToPath } from "node:url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const FIXTURES_PATH = path.join(__dirname, "..", "test", "fixtures", "keys");

const TLS = {
  key: fs.readFileSync(path.join(FIXTURES_PATH, "agent1-key.pem")),
  cert: fs.readFileSync(path.join(FIXTURES_PATH, "agent1-cert.pem")),
  ALPNProtocols: ["h2"],
};

function createUpgradeServer(
  handler: (req: http2.Http2ServerRequest, res: http2.Http2ServerResponse) => void,
  opts: { onSession?: (session: http2.Http2Session) => void } = {},
): Promise<{ netServer: net.Server; h2Server: http2.Http2SecureServer; port: number }> {
  return new Promise(resolve => {
    const h2Server = http2.createSecureServer(TLS, handler);
    h2Server.on("error", () => {});
    if (opts.onSession) h2Server.on("session", opts.onSession);

    const netServer = net.createServer(socket => {
      h2Server.emit("connection", socket);
    });

    netServer.listen(0, "127.0.0.1", () => {
      resolve({ netServer, h2Server, port: (netServer.address() as net.AddressInfo).port });
    });
  });
}

function connectClient(port: number): http2.ClientHttp2Session {
  const client = http2.connect(`https://127.0.0.1:${port}`, { rejectUnauthorized: false });
  client.on("error", () => {});
  return client;
}

function request(
  client: http2.ClientHttp2Session,
  method: string,
  reqPath: string,
  body?: string,
): Promise<{ status: number; headers: http2.IncomingHttpHeaders; body: string }> {
  return new Promise((resolve, reject) => {
    const req = client.request({ ":method": method, ":path": reqPath });
    let responseBody = "";
    let responseHeaders: http2.IncomingHttpHeaders = {};
    req.on("response", hdrs => {
      responseHeaders = hdrs;
    });
    req.setEncoding("utf8");
    req.on("data", (chunk: string) => {
      responseBody += chunk;
    });
    req.on("end", () => {
      resolve({
        status: responseHeaders[":status"] as unknown as number,
        headers: responseHeaders,
        body: responseBody,
      });
    });
    req.on("error", reject);
    if (body !== undefined) {
      req.end(body);
    } else {
      req.end();
    }
  });
}

describe("HTTP/2 upgrade via net.Server", () => {
  let servers: { netServer: net.Server }[] = [];
  let clients: http2.ClientHttp2Session[] = [];

  afterEach(() => {
    for (const c of clients) c.close();
    for (const s of servers) s.netServer.close();
    clients = [];
    servers = [];
  });

  test("GET request succeeds with 200 and custom headers", async () => {
    const srv = await createUpgradeServer((_req, res) => {
      res.writeHead(200, { "x-upgrade-test": "yes" });
      res.end("hello from upgraded server");
    });
    servers.push(srv);

    const client = connectClient(srv.port);
    clients.push(client);

    const result = await request(client, "GET", "/");
    assert.strictEqual(result.status, 200);
    assert.strictEqual(result.headers["x-upgrade-test"], "yes");
    assert.strictEqual(result.body, "hello from upgraded server");
  });

  test("POST request with body echoed back", async () => {
    const srv = await createUpgradeServer((_req, res) => {
      let body = "";
      _req.on("data", (chunk: string) => {
        body += chunk;
      });
      _req.on("end", () => {
        res.writeHead(200);
        res.end("echo:" + body);
      });
    });
    servers.push(srv);

    const client = connectClient(srv.port);
    clients.push(client);

    const result = await request(client, "POST", "/echo", "test payload");
    assert.strictEqual(result.status, 200);
    assert.strictEqual(result.body, "echo:test payload");
  });
});

describe("HTTP/2 upgrade — multiple requests on one connection", () => {
  test("three sequential requests share the same session", async () => {
    let count = 0;
    const srv = await createUpgradeServer((_req, res) => {
      count++;
      res.writeHead(200);
      res.end(String(count));
    });

    const client = connectClient(srv.port);

    const r1 = await request(client, "GET", "/");
    const r2 = await request(client, "GET", "/");
    const r3 = await request(client, "GET", "/");

    assert.strictEqual(r1.body, "1");
    assert.strictEqual(r2.body, "2");
    assert.strictEqual(r3.body, "3");

    client.close();
    srv.netServer.close();
  });
});

describe("HTTP/2 upgrade — session event", () => {
  test("h2Server emits session event", async () => {
    let sessionFired = false;
    const srv = await createUpgradeServer(
      (_req, res) => {
        res.writeHead(200);
        res.end("ok");
      },
      {
        onSession: () => {
          sessionFired = true;
        },
      },
    );

    const client = connectClient(srv.port);

    await request(client, "GET", "/");

    assert.strictEqual(sessionFired, true);

    client.close();
    srv.netServer.close();
  });
});

describe("HTTP/2 upgrade — concurrent clients", () => {
  test("two clients get independent sessions", async () => {
    const srv = await createUpgradeServer((_req, res) => {
      res.writeHead(200);
      res.end(_req.url);
    });

    const c1 = connectClient(srv.port);
    const c2 = connectClient(srv.port);

    const [r1, r2] = await Promise.all([request(c1, "GET", "/from-client-1"), request(c2, "GET", "/from-client-2")]);

    assert.strictEqual(r1.body, "/from-client-1");
    assert.strictEqual(r2.body, "/from-client-2");

    c1.close();
    c2.close();
    srv.netServer.close();
  });
});

describe("HTTP/2 upgrade — socket close ordering", () => {
  test("no crash when rawSocket.destroy() precedes session.close()", async () => {
    let rawSocket: net.Socket | undefined;
    let h2Session: http2.Http2Session | undefined;

    const h2Server = http2.createSecureServer(TLS, (_req, res) => {
      res.writeHead(200);
      res.end("done");
    });
    h2Server.on("error", () => {});
    h2Server.on("session", s => {
      h2Session = s;
    });

    const netServer = net.createServer(socket => {
      rawSocket = socket;
      h2Server.emit("connection", socket);
    });

    const port = await new Promise<number>(resolve => {
      netServer.listen(0, "127.0.0.1", () => resolve((netServer.address() as net.AddressInfo).port));
    });

    const client = connectClient(port);
    await request(client, "GET", "/");

    const socketClosed = Promise.withResolvers<void>();
    rawSocket!.once("close", () => socketClosed.resolve());
    rawSocket!.destroy();
    await socketClosed.promise;
    if (h2Session) h2Session.close();

    client.close();
    netServer.close();
  });

  test("no crash when session.close() precedes rawSocket.destroy()", async () => {
    let rawSocket: net.Socket | undefined;
    let h2Session: http2.Http2Session | undefined;

    const h2Server = http2.createSecureServer(TLS, (_req, res) => {
      res.writeHead(200);
      res.end("done");
    });
    h2Server.on("error", () => {});
    h2Server.on("session", s => {
      h2Session = s;
    });

    const netServer = net.createServer(socket => {
      rawSocket = socket;
      h2Server.emit("connection", socket);
    });

    const port = await new Promise<number>(resolve => {
      netServer.listen(0, "127.0.0.1", () => resolve((netServer.address() as net.AddressInfo).port));
    });

    const client = connectClient(port);
    await request(client, "GET", "/");

    if (h2Session) h2Session.close();
    const socketClosed = Promise.withResolvers<void>();
    rawSocket!.once("close", () => socketClosed.resolve());
    rawSocket!.destroy();
    await socketClosed.promise;

    client.close();
    netServer.close();
  });
});

describe("HTTP/2 upgrade — ALPN negotiation", () => {
  test("alpnProtocol is h2 after upgrade", async () => {
    let observedAlpn: string | undefined;
    const srv = await createUpgradeServer((_req, res) => {
      const session = _req.stream.session;
      if (session && session.socket) {
        observedAlpn = (session.socket as any).alpnProtocol;
      }
      res.writeHead(200);
      res.end("alpn-ok");
    });

    const client = connectClient(srv.port);
    await request(client, "GET", "/");

    assert.strictEqual(observedAlpn, "h2");

    client.close();
    srv.netServer.close();
  });
});

describe("HTTP/2 upgrade — varied status codes", () => {
  test("404 response with custom header", async () => {
    const srv = await createUpgradeServer((_req, res) => {
      res.writeHead(404, { "x-reason": "not-found" });
      res.end("not found");
    });

    const client = connectClient(srv.port);
    const result = await request(client, "GET", "/missing");

    assert.strictEqual(result.status, 404);
    assert.strictEqual(result.headers["x-reason"], "not-found");
    assert.strictEqual(result.body, "not found");

    client.close();
    srv.netServer.close();
  });

  test("302 redirect response", async () => {
    const srv = await createUpgradeServer((_req, res) => {
      res.writeHead(302, { location: "/" });
      res.end();
    });

    const client = connectClient(srv.port);
    const result = await request(client, "GET", "/redirect");

    assert.strictEqual(result.status, 302);
    assert.strictEqual(result.headers["location"], "/");

    client.close();
    srv.netServer.close();
  });

  test("large response body (8KB) through upgraded socket", async () => {
    const srv = await createUpgradeServer((_req, res) => {
      res.writeHead(200);
      res.end("x".repeat(8192));
    });

    const client = connectClient(srv.port);
    const result = await request(client, "GET", "/large");

    assert.strictEqual(result.body.length, 8192);

    client.close();
    srv.netServer.close();
  });
});

describe("HTTP/2 upgrade — client disconnect mid-response", () => {
  test("server does not crash when client destroys stream early", async () => {
    const streamClosed = Promise.withResolvers<void>();

    const srv = await createUpgradeServer((_req, res) => {
      res.writeHead(200);
      const interval = setInterval(() => {
        if (res.destroyed || res.writableEnded) {
          clearInterval(interval);
          return;
        }
        res.write("chunk\n");
      }, 5);
      _req.stream.on("close", () => {
        clearInterval(interval);
        streamClosed.resolve();
      });
    });

    const client = connectClient(srv.port);

    const streamReady = Promise.withResolvers<http2.ClientHttp2Stream>();
    const req = client.request({ ":method": "GET", ":path": "/" });
    req.on("response", () => streamReady.resolve(req));
    req.on("error", () => {});

    const stream = await streamReady.promise;
    stream.destroy();

    await streamClosed.promise;

    client.close();
    srv.netServer.close();
  });
});

describe("HTTP/2 upgrade — independent upgrade per connection", () => {
  test("three clients produce three distinct sessions", async () => {
    const sessions: http2.Http2Session[] = [];

    const srv = await createUpgradeServer(
      (_req, res) => {
        res.writeHead(200);
        res.end("ok");
      },
      { onSession: s => sessions.push(s) },
    );

    const c1 = connectClient(srv.port);
    const c2 = connectClient(srv.port);
    const c3 = connectClient(srv.port);

    await Promise.all([request(c1, "GET", "/"), request(c2, "GET", "/"), request(c3, "GET", "/")]);

    assert.strictEqual(sessions.length, 3);
    assert.notStrictEqual(sessions[0], sessions[1]);
    assert.notStrictEqual(sessions[1], sessions[2]);

    c1.close();
    c2.close();
    c3.close();
    srv.netServer.close();
  });
});

if (typeof Bun !== "undefined") {
  describe("Node.js compatibility", () => {
    test("tests should run on node.js", async () => {
      await using proc = Bun.spawn({
        cmd: [Bun.which("node") || "node", "--test", import.meta.filename],
        stdout: "inherit",
        stderr: "inherit",
        stdin: "ignore",
      });
      assert.strictEqual(await proc.exited, 0);
    });
  });
}
@@ -1,69 +0,0 @@
'use strict';

const common = require('../common');
const fixtures = require('../common/fixtures');
if (!common.hasCrypto)
  common.skip('missing crypto');
const assert = require('assert');
const net = require('net');
const h2 = require('http2');

const tlsOptions = {
  key: fixtures.readKey('agent1-key.pem'),
  cert: fixtures.readKey('agent1-cert.pem'),
  ALPNProtocols: ['h2']
};

// Create a net server that upgrades sockets to HTTP/2 manually, handles the
// request, and then shuts down via a short socket timeout and a longer H2 session
// timeout. This is an unconventional way to shut down a session (the underlying
// socket closing first) but it should work - critically, it shouldn't segfault
// (as it did until Node v20.5.1).

let serverRawSocket;
let serverH2Session;

const netServer = net.createServer((socket) => {
  serverRawSocket = socket;
  h2Server.emit('connection', socket);
});

const h2Server = h2.createSecureServer(tlsOptions, (req, res) => {
  res.writeHead(200);
  res.end();
});

h2Server.on('session', (session) => {
  serverH2Session = session;
});

netServer.listen(0, common.mustCall(() => {
  const proxyClient = h2.connect(`https://localhost:${netServer.address().port}`, {
    rejectUnauthorized: false
  });

  proxyClient.on('error', () => {});
  proxyClient.on('close', common.mustCall(() => {
    netServer.close();
  }));

  const req = proxyClient.request({
    ':method': 'GET',
    ':path': '/'
  });

  req.on('error', () => {});
  req.on('response', common.mustCall((response) => {
    assert.strictEqual(response[':status'], 200);

    // Asynchronously shut down the server's connections after the response,
    // but not in the order it typically expects:
    setTimeout(() => {
      serverRawSocket.destroy();

      setTimeout(() => {
        serverH2Session.close();
      }, 10);
    }, 10);
  }));
}));