Compare commits


2 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Ashcon Partovi | 90d6a3f7bd | Experimental vendor regression tests | 2025-04-11 18:22:59 -07:00 |
| Ashcon Partovi | a2df5195e0 | Remove vendored express tests | 2025-04-11 18:22:01 -07:00 |
989 changed files with 54497 additions and 64004 deletions

View File

@@ -65,27 +65,21 @@ The Zig files implement the native functionality:
```zig
// Example: TextDecoder.zig
pub const TextDecoder = struct {
// Expose generated bindings as `js` namespace with trait conversion methods
pub const js = JSC.Codegen.JSTextDecoder;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// Internal state
encoding: []const u8,
fatal: bool,
ignoreBOM: bool,
// Use generated bindings
pub usingnamespace JSC.Codegen.JSTextDecoder;
pub usingnamespace bun.New(@This());
// Constructor implementation - note use of globalObject
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*TextDecoder {
// Implementation
return bun.new(TextDecoder, .{
// Fields
});
}
// Prototype methods - note return type includes JSError
@@ -107,22 +101,22 @@ pub const TextDecoder = struct {
}
// Cleanup - note standard pattern of using deinit/deref
fn deinit(this: *TextDecoder) void {
pub fn deinit(this: *TextDecoder) void {
// Release any retained resources
// Free the pointer at the end.
bun.destroy(this);
}
// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
pub fn finalize(this: *TextDecoder) void {
this.deinit();
// Or sometimes this is used to free memory instead
bun.default_allocator.destroy(this);
}
};
```
Key components in the Zig file:
- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- `usingnamespace JSC.Codegen.JS<ClassName>` to include generated code
- `usingnamespace bun.New(@This())` for object creation helpers
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
- Consistent use of `globalObject` parameter name instead of `ctx`
- Methods matching the JavaScript interface
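The generated binding surfaces in JavaScript as an ordinary class. A quick usage sketch for illustration (this is just the standard WHATWG `TextDecoder` surface, backed by the Zig implementation above):
```ts
const decoder = new TextDecoder("utf-8", { fatal: true });
console.log(decoder.decode(new Uint8Array([104, 105]))); // "hi"
console.log(decoder.encoding, decoder.fatal, decoder.ignoreBOM); // "utf-8" true false
```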
@@ -401,16 +395,12 @@ To create a new class binding in Bun:
2. **Implement the native functionality** in a `.zig` file:
```zig
pub const MyClass = struct {
// Generated bindings
pub const js = JSC.Codegen.JSMyClass;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// State
value: []const u8,
pub const new = bun.TrivialNew(@This());
// Generated bindings
pub usingnamespace JSC.Codegen.JSMyClass;
pub usingnamespace bun.New(@This());
// Constructor
pub fn constructor(
@@ -442,7 +432,7 @@ To create a new class binding in Bun:
pub fn finalize(this: *MyClass) void {
this.deinit();
bun.destroy(this);
bun.default_allocator.destroy(this);
}
};
```
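Step 1 of this walkthrough (elided from the diff above) declares the class shape in a `.classes.ts` file. A minimal hypothetical sketch, assuming the `define` helper from Bun's class codegen; the field names shown are illustrative, not confirmed by this diff:
```ts
import { define } from "../../codegen/class-definitions";

export default [
  define({
    name: "MyClass",
    construct: true, // generate `new MyClass(...)`, wired to `constructor` in Zig
    finalize: true, // generate the GC hook that calls `finalize` in Zig
    klass: {}, // static properties
    proto: {
      // JS method name mapped to the Zig fn of the same name
      getValue: { fn: "getValue", length: 0 },
    },
  }),
];
```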
@@ -495,4 +485,4 @@ For each Zig class, the system generates:
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues
This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.

View File

@@ -1,8 +0,0 @@
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
# avoid mucking up blames.
#
# To configure git to use this, run:
#
# git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)

View File

@@ -4,7 +4,6 @@ on:
push:
paths:
- "docs/**"
- "packages/bun-types/**.d.ts"
- "CONTRIBUTING.md"
branches:
- main

View File

@@ -5,7 +5,8 @@ on:
workflow_dispatch:
env:
BUN_VERSION: "1.2.10"
BUN_VERSION: "1.2.0"
OXLINT_VERSION: "0.15.0"
jobs:
lint-js:
@@ -18,4 +19,4 @@ jobs:
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Lint
run: bun lint
run: bunx oxlint --config oxlint.json --quiet --format github

.gitignore vendored
View File

@@ -153,7 +153,6 @@ test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
codegen-for-zig-team.tar.gz
# Dependencies
/vendor

View File

@@ -30,13 +30,11 @@
"zig.initialSetupDone": true,
"zig.buildOption": "build",
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {
@@ -146,8 +144,6 @@
"*.mdc": "markdown",
"array": "cpp",
"ios": "cpp",
"oxlint.json": "jsonc",
"bun.lock": "jsonc",
},
"C_Cpp.files.exclude": {
"**/.vscode": true,

LATEST
View File

@@ -1 +1 @@
1.2.10
1.2.9

View File

@@ -1,14 +0,0 @@
import { bench, run } from "../runner.mjs";
const url = "http://localhost:3000/";
const clonable = new Request(url);
bench("request.clone().method", () => {
return clonable.clone().method;
});
bench("new Request(url).method", () => {
return new Request(url).method;
});
await run();

build.zig
View File

@@ -4,7 +4,7 @@ const builtin = @import("builtin");
const Build = std.Build;
const Step = Build.Step;
const Compile = Step.Compile;
const LazyPath = Build.LazyPath;
const LazyPath = Step.LazyPath;
const Target = std.Target;
const ResolvedTarget = std.Build.ResolvedTarget;
const CrossTarget = std.zig.CrossTarget;
@@ -18,21 +18,21 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
const pathRel = fs.path.relative;
/// When updating this, make sure to adjust SetupZig.cmake
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
const recommended_zig_version = "0.14.0";
// comptime {
// if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
// @compileError(
// "" ++
// "Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
// builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
// "CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
// "upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
// "comment this error out.",
// );
// }
// }
comptime {
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
@compileError(
"" ++
"Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
"CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
"upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
"comment this error out.",
);
}
}
const zero_sha = "0000000000000000000000000000000000000000";
@@ -93,7 +93,6 @@ const BunBuildOptions = struct {
opts.addOption(bool, "baseline", this.isBaseline());
opts.addOption(bool, "enable_logs", this.enable_logs);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
const mod = opts.createModule();
this.cached_options_module = mod;
@@ -199,7 +198,10 @@ pub fn build(b: *Build) !void {
const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0";
b.reference_trace = b.reference_trace orelse 32;
b.reference_trace = ref_trace: {
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 24;
break :ref_trace if (trace == 0) null else trace;
};
const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;
@@ -333,22 +335,6 @@ pub fn build(b: *Build) !void {
b.default_step.dependOn(step);
}
// zig build watch
// const enable_watch_step = b.option(bool, "watch_step", "Enable the watch step. This reads more files so it is off by default") orelse false;
// if (no_llvm or enable_watch_step) {
// self_hosted_watch.selfHostedExeBuild(b, &build_options) catch @panic("OOM");
// }
// zig build check-debug
{
const step = b.step("check-debug", "Check for semantic analysis errors on some platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
}, &.{.Debug});
}
// zig build check-all
{
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
@@ -402,22 +388,7 @@ pub fn build(b: *Build) !void {
// zig build translate-c-headers
{
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
for ([_]TargetDescription{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}) |t| {
const resolved = t.resolveTarget(b);
step.dependOn(
&b.addInstallFile(getTranslateC(b, resolved, .Debug), b.fmt("translated-c-headers/{s}.zig", .{
resolved.result.zigTriple(b.allocator) catch @panic("OOM"),
})).step,
);
}
step.dependOn(&b.addInstallFile(getTranslateC(b, b.graph.host, .Debug).getOutput(), "translated-c-headers.zig").step);
}
// zig build enum-extractor
@@ -434,32 +405,23 @@ pub fn build(b: *Build) !void {
}
}
const TargetDescription = struct {
os: OperatingSystem,
arch: Arch,
musl: bool = false,
fn resolveTarget(desc: TargetDescription, b: *Build) std.Build.ResolvedTarget {
return b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(desc.os),
.cpu_arch = desc.arch,
.cpu_model = getCpuModel(desc.os, desc.arch) orelse .determined_by_arch_os,
.os_version_min = getOSVersionMin(desc.os),
.glibc_version = if (desc.musl) null else getOSGlibCVersion(desc.os),
});
}
};
fn addMultiCheck(
pub fn addMultiCheck(
b: *Build,
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const TargetDescription,
to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false },
optimize: []const std.builtin.OptimizeMode,
) void {
for (to_check) |check| {
for (optimize) |mode| {
const check_target = check.resolveTarget(b);
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_arch_os,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = if (check.musl) null else getOSGlibCVersion(check.os),
});
var options: BunBuildOptions = .{
.target = check_target,
.os = check.os,
@@ -483,13 +445,7 @@ fn addMultiCheck(
}
}
fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) LazyPath {
const target = b.resolveTargetQuery(q: {
var query = initial_target.query;
if (query.os_tag == .windows)
query.abi = .gnu;
break :q query;
});
fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Step.TranslateC {
const translate_c = b.addTranslateC(.{
.root_source_file = b.path("src/c-headers-for-zig.h"),
.target = target,
@@ -505,72 +461,33 @@ fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize:
const str, const value = entry;
translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
}
if (target.result.os.tag == .windows) {
// translate-c is unable to translate the unsuffixed windows functions
// like `SetCurrentDirectory` since they are defined with an odd macro
// that translate-c doesn't handle.
//
// #define SetCurrentDirectory __MINGW_NAME_AW(SetCurrentDirectory)
//
// In these cases, it's better to just reference the underlying function
// directly: SetCurrentDirectoryW. To make the error better, a post
// processing step is applied to the translate-c file.
//
// Additionally, this step makes it so that decls like NTSTATUS and
// HANDLE point to the standard library structures.
const helper_exe = b.addExecutable(.{
.name = "process_windows_translate_c",
.root_module = b.createModule(.{
.root_source_file = b.path("src/codegen/process_windows_translate_c.zig"),
.target = b.graph.host,
.optimize = .Debug,
}),
});
const in = translate_c.getOutput();
const run = b.addRunArtifact(helper_exe);
run.addFileArg(in);
const out = run.addOutputFileArg("c-headers-for-zig.zig");
return out;
}
return translate_c.getOutput();
return translate_c;
}
pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
// Create `@import("bun")`, containing most of Bun's code.
const bun = b.createModule(.{
.root_source_file = b.path("src/bun.zig"),
});
bun.addImport("bun", bun); // allow circular "bun" import
addInternalImports(b, bun, opts);
const root = b.createModule(.{
.root_source_file = b.path("src/main.zig"),
// Root module gets compilation flags. Forwarded as default to dependencies.
.target = opts.target,
.optimize = opts.optimize,
});
root.addImport("bun", bun);
const obj = b.addObject(.{
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
.root_module = root,
.root_source_file = switch (opts.os) {
.wasm => b.path("root_wasm.zig"),
else => b.path("src/main.zig"),
// else => b.path("root_css.zig"),
},
.target = opts.target,
.optimize = opts.optimize,
.use_llvm = !opts.no_llvm,
.use_lld = if (opts.os == .mac) false else !opts.no_llvm,
// https://github.com/ziglang/zig/issues/17430
.pic = true,
.omit_frame_pointer = false,
.strip = false, // stripped at the end
});
configureObj(b, opts, obj);
return obj;
}
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
// Flags on root module get used for the compilation
obj.root_module.omit_frame_pointer = false;
obj.root_module.strip = false; // stripped at the end
// https://github.com/ziglang/zig/issues/17430
obj.root_module.pic = true;
// Object options
obj.use_llvm = !opts.no_llvm;
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
if (opts.enable_asan) {
if (@hasField(Build.Module, "sanitize_address")) {
obj.root_module.sanitize_address = true;
@@ -581,6 +498,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
}
obj.bundle_compiler_rt = false;
obj.bundle_ubsan_rt = false;
obj.root_module.omit_frame_pointer = false;
// Link libc
if (opts.os != .wasm) {
@@ -590,7 +508,6 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
// Disable stack probing on x86 so we don't need to include compiler_rt
if (opts.arch.isX86()) {
// TODO: enable on debug please.
obj.root_module.stack_check = false;
obj.root_module.stack_protector = false;
}
@@ -605,18 +522,15 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
obj.root_module.valgrind = true;
}
}
addInternalPackages(b, obj, opts);
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));
const translate_c = getTranslateC(b, opts.target, opts.optimize);
obj.root_module.addImport("translated-c-headers", translate_c.createModule());
}
const ObjectFormat = enum {
/// Emitting LLVM bc files could allow a stronger LTO pass, however it
/// doesn't yet work. It is left accessible with `-Dobj_format=bc` or in
/// CMake with `-DZIG_OBJECT_FORMAT=bc`.
///
/// To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
/// LLVM 18.1.7 is not compatible with the bitcode Zig 0.13 outputs (which itself uses LLVM 18.1.7).
/// Change to "bc" to experiment, "Invalid record" means it is not valid output.
bc,
/// Emit a .o / .obj file for the bun-zig object.
obj,
};
@@ -646,21 +560,16 @@ fn exists(path: []const u8) bool {
return true;
}
fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
const os = opts.os;
mod.addImport("build_options", opts.buildOptionsModule(b));
const translate_c = getTranslateC(b, opts.target, opts.optimize);
mod.addImport("translated-c-headers", b.createModule(.{ .root_source_file = translate_c }));
const zlib_internal_path = switch (os) {
.windows => "src/deps/zlib.win32.zig",
.linux, .mac => "src/deps/zlib.posix.zig",
else => null,
};
if (zlib_internal_path) |path| {
mod.addAnonymousImport("zlib-internal", .{
obj.root_module.addAnonymousImport("zlib-internal", .{
.root_source_file = b.path(path),
});
}
@@ -670,7 +579,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
.windows => "src/async/windows_event_loop.zig",
else => "src/async/stub_event_loop.zig",
};
mod.addAnonymousImport("async", .{
obj.root_module.addAnonymousImport("async", .{
.root_source_file = b.path(async_path),
});
@@ -718,7 +627,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
entry.import
else
entry.file;
mod.addAnonymousImport(import_path, .{
obj.root_module.addAnonymousImport(import_path, .{
.root_source_file = .{ .cwd_relative = path },
});
}
@@ -728,37 +637,16 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
.{ .import = "completions-fish", .file = b.path("completions/bun.fish") },
}) |entry| {
mod.addAnonymousImport(entry.import, .{
obj.root_module.addAnonymousImport(entry.import, .{
.root_source_file = entry.file,
});
}
if (os == .windows) {
mod.addAnonymousImport("bun_shim_impl.exe", .{
obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{
.root_source_file = opts.windowsShim(b).exe.getEmittedBin(),
});
}
// Finally, make it so all modules share the same import table.
propagateImports(mod) catch @panic("OOM");
}
/// Makes all imports of `source_mod` visible to all of its dependencies.
/// Does not replace existing imports.
fn propagateImports(source_mod: *Module) !void {
var seen = std.AutoHashMap(*Module, void).init(source_mod.owner.graph.arena);
defer seen.deinit();
var queue = std.ArrayList(*Module).init(source_mod.owner.graph.arena);
defer queue.deinit();
try queue.appendSlice(source_mod.import_table.values());
while (queue.pop()) |mod| {
if ((try seen.getOrPut(mod)).found_existing) continue;
try queue.appendSlice(mod.import_table.values());
for (source_mod.import_table.keys(), source_mod.import_table.values()) |k, v|
if (mod.import_table.get(k) == null)
mod.addImport(k, v);
}
}
fn validateGeneratedPath(path: []const u8) void {
@@ -787,34 +675,30 @@ const WindowsShim = struct {
const exe = b.addExecutable(.{
.name = "bun_shim_impl",
.root_module = b.createModule(.{
.root_source_file = path,
.target = target,
.optimize = .ReleaseFast,
.unwind_tables = .none,
.omit_frame_pointer = true,
.strip = true,
.sanitize_thread = false,
.single_threaded = true,
.link_libc = false,
}),
.linkage = .static,
.root_source_file = path,
.target = target,
.optimize = .ReleaseFast,
.use_llvm = true,
.use_lld = true,
.unwind_tables = .none,
.omit_frame_pointer = true,
.strip = true,
.linkage = .static,
.sanitize_thread = false,
.single_threaded = true,
.link_libc = false,
});
const dbg = b.addExecutable(.{
.name = "bun_shim_debug",
.root_module = b.createModule(.{
.root_source_file = path,
.target = target,
.optimize = .Debug,
.single_threaded = true,
.link_libc = false,
}),
.linkage = .static,
.root_source_file = path,
.target = target,
.optimize = .Debug,
.use_llvm = true,
.use_lld = true,
.linkage = .static,
.single_threaded = true,
.link_libc = false,
});
return .{ .exe = exe, .dbg = dbg };

View File

@@ -27,8 +27,10 @@
},
"packages/bun-types": {
"name": "bun-types",
"version": "1.2.5",
"dependencies": {
"@types/node": "*",
"@types/ws": "*",
},
"devDependencies": {
"@biomejs/biome": "^1.5.3",
@@ -164,6 +166,8 @@
"@types/semver": ["@types/semver@7.5.8", "", {}, "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ=="],
"@types/ws": ["@types/ws@8.5.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w=="],
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@7.16.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.16.1", "@typescript-eslint/type-utils": "7.16.1", "@typescript-eslint/utils": "7.16.1", "@typescript-eslint/visitor-keys": "7.16.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "@typescript-eslint/parser": "^7.0.0", "eslint": "^8.56.0" } }, "sha512-SxdPak/5bO0EnGktV05+Hq8oatjAYVY3Zh2bye9pGZy6+jwyR3LG3YKkV4YatlsgqXP28BTeVm9pqwJM96vf2A=="],
"@typescript-eslint/parser": ["@typescript-eslint/parser@7.16.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "7.16.1", "@typescript-eslint/types": "7.16.1", "@typescript-eslint/typescript-estree": "7.16.1", "@typescript-eslint/visitor-keys": "7.16.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-u+1Qx86jfGQ5i4JjK33/FnawZRpsLxRnKzGE6EABZ40KxVT/vWsiZFEBBHjFOljmmV3MBYOHEKi0Jm9hbAOClA=="],
@@ -912,6 +916,8 @@
"@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
"@types/ws/@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
"@typescript-eslint/visitor-keys/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
@@ -1002,6 +1008,8 @@
"@definitelytyped/utils/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="],
"@types/ws/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="],
"are-we-there-yet/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="],

View File

@@ -423,7 +423,7 @@ function(register_command)
# libbun-profile.a is now over 5gb in size, compress it first
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -1 libbun-profile.a)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -6 libbun-profile.a)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
else()
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})

View File

@@ -1089,7 +1089,6 @@ set(BUN_DEPENDENCIES
BoringSSL
Brotli
Cares
Highway
LibDeflate
LolHtml
Lshpack

View File

@@ -1,33 +0,0 @@
register_repository(
NAME
highway
REPOSITORY
google/highway
COMMIT
12b325bc1793dee68ab2157995a690db859fe9e0
)
set(HIGHWAY_CMAKE_ARGS
# Build a static library
-DBUILD_SHARED_LIBS=OFF
# Enable position-independent code for linking into the main executable
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
# Disable unnecessary components
-DHWY_ENABLE_TESTS=OFF
-DHWY_ENABLE_EXAMPLES=OFF
-DHWY_ENABLE_CONTRIB=OFF
# Disable building of the install target
-DHWY_ENABLE_INSTALL=OFF
)
register_cmake_command(
TARGET
highway
LIBRARIES
hwy
ARGS
${HIGHWAY_CMAKE_ARGS}
INCLUDES
.
hwy
)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
ebiggers/libdeflate
COMMIT
78051988f96dc8d8916310d8b24021f01bd9e102
733848901289eca058804ca0737f8796875204c8
)
register_cmake_command(

View File

@@ -20,7 +20,7 @@ else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5")
set(ZIG_COMMIT "deab5c9e7526de0a47b449c5545c3a0f66ebc3c8")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -50,7 +50,7 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF
optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to local the zig cache directory" DEFAULT ${CACHE_PATH}/zig/local)
optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global)
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler. Only available on macOS aarch64." DEFAULT ${BUILDKITE})
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler. Only available on macOS aarch64." DEFAULT OFF)
setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR})
setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR})

View File

@@ -55,7 +55,7 @@ RUN apt-get update -qq \
&& which bun \
&& bun --version
FROM debian:bookworm-slim
FROM debian:bullseye-slim
# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful

View File

@@ -56,7 +56,7 @@ RUN apt-get update -qq \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun
FROM debian:bookworm
FROM debian:bullseye
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun

View File

@@ -21,6 +21,28 @@ const exists = await redis.exists("greeting");
await redis.del("greeting");
```
{% features title="Features" %}
{% icon size=20 name="Bolt" /%} Fast native implementation using Zig and JavaScriptCore
{% icon size=20 name="Link" /%} Automatic pipelining for better performance
{% icon size=20 name="EthernetPort" /%} Auto-reconnect with exponential backoff
{% icon size=20 name="Omega" /%} Support for RESP3 protocol
{% icon size=20 name="Lock" /%} TLS support
{% icon size=20 name="Clock" /%} Connection management with configurable timeouts
{% icon size=20 name="IndentDecrease" /%} Offline command queue
{% icon size=20 name="Settings" /%} Automatic configuration with environment variables
{% icon size=20 name="Hash" /%} Support for hash, set, and other Redis data structures
{% /features %}
## Getting Started
To use the Redis client, you first need to create a connection:
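A minimal connection sketch, assuming the `RedisClient` export described in these docs (the URL and key names are illustrative):
```ts
import { RedisClient } from "bun";

// The client can also pick up its URL from environment variables
// (see the features list above).
const client = new RedisClient("redis://localhost:6379");

await client.set("key", "value");
console.log(await client.get("key")); // "value"

client.close(); // or client.disconnect(), per the variant shown below
```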
@@ -59,7 +81,7 @@ await client.set("key", "value");
await client.get("key");
// Explicitly close the connection when done
client.close();
client.disconnect();
```
You can also manually control the connection lifecycle:
@@ -74,7 +96,7 @@ await client.connect();
await client.set("key", "value");
// Disconnect when done
client.close();
client.disconnect();
```
## Basic Operations
@@ -219,7 +241,7 @@ client.onclose = error => {
// Manually connect/disconnect
await client.connect();
client.close();
client.disconnect();
```
### Connection Status and Monitoring

View File

@@ -32,7 +32,7 @@ pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
const gen = bun.gen.math; // "math" being this file's basename
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const JSC = bun.JSC;
```
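For context, a hypothetical sketch of the paired `math.bind.ts` definition this generator consumes, assuming `fn` and `t` helpers from the `bindgen` module; exact names are not confirmed by this diff:
```ts
import { fn, t } from "bindgen";

// Declares the same `add` signature as the Zig fn above.
export const add = fn({
  args: {
    global: t.globalObject,
    a: t.i32,
    b: t.i32,
  },
  ret: t.i32,
});
```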

View File

@@ -106,6 +106,8 @@ This page is updated regularly to reflect compatibility status of the latest ver
🟡 Missing `secureHeapUsed` `setEngine` `setFips`
Some methods are not optimized yet.
### [`node:domain`](https://nodejs.org/api/domain.html)
🟡 Missing `Domain` `active`
@@ -377,7 +379,6 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`require()`](https://nodejs.org/api/globals.html#require)
🟢 Fully implemented, including [`require.main`](https://nodejs.org/api/modules.html#requiremain), [`require.cache`](https://nodejs.org/api/modules.html#requirecache), [`require.resolve`](https://nodejs.org/api/modules.html#requireresolverequest-options).
### [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)
🟢 Fully implemented.

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -98,7 +98,7 @@ chunks.push(`// Auto-generated file. Do not edit.
// This used to be a comptime block, but it made the build too slow.
// Compressing the completions list saves about 100 KB of binary size.
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const zstd = bun.zstd;
const Environment = bun.Environment;

View File

@@ -1,5 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
@@ -12,6 +12,7 @@ const C = bun.C;
const clap = @import("../src/deps/zig-clap/clap.zig");
const URL = @import("../src/url.zig").URL;
const Headers = @import("../src/bun.js/webcore/response.zig").Headers;
const Method = @import("../src/http/method.zig").Method;
const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType;
const HeadersTuple = ColonListType(string, noop_resolver);

View File

@@ -61,6 +61,7 @@ zig_keywords = {
'try',
'union',
'unreachable',
'usingnamespace',
'var',
'volatile',
'while',

View File

@@ -1,6 +1,6 @@
// most of this file is copy pasted from other files in misctools
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -1,10 +1,10 @@
{
"$schema": "https://raw.githubusercontent.com/oxc-project/oxc/refs/heads/main/npm/oxlint/configuration_schema.json",
"categories": {
"correctness": "error"
"correctness": "warn" // TODO: gradually fix bugs and turn this to error
},
"rules": {
"const-comparisons": "error",
"const-comparisons": "off", // TODO: there's a bug when comparing private identifiers. Re-enable once it's fixed.
"no-cond-assign": "error",
"no-const-assign": "error",
"no-debugger": "error",
@@ -13,35 +13,12 @@
"no-empty-pattern": "error",
"import/no-duplicates": "error",
"no-control-regex": "off",
"no-useless-escape": "off",
"no-this-alias": "off", // many intentional this aliases
"triple-slash-reference": "off", // many intentional triple slash references
// This rule is dumb.
// Array.from is MUCH slower than new Array(size).
"no-new-array": "off",
// We have custom thenables. This is not a bug.
"no-thenable": "off",
"no-undef-init": "error",
// We use this in some cases. The ordering is deliberate.
"no-unsafe-finally": "off",
// We use !!$debug to check if the debugger is enabled.
// Boolean() is also generally slower than !!.
"no-extra-boolean-cast": "off",
// Eslint is not a type checker.
"no-throw-literal": "off"
"no-useless-escape": "off" // there's a lot of these. Should be fixed eventually.
},
"ignorePatterns": [
"vendor",
"build",
"bench",
"test/snapshots/**",
"bench/react-hello-world/*.js",
"bun.lock",
@@ -55,13 +32,8 @@
"test/bundler/transpiler/with-statement-works.js", // parser doesn't allow `with` statement
"test/js/node/module/extensions-fixture", // these files are not meant to be linted
"test/cli/run/module-type-fixture",
"test/bundler/transpiler/with-statement-works.js", // parser doesn't allow `with` statement
// TODO: fix these
"src/js/node/http2.ts",
"src/js/node/http.ts"
"test/bundler/transpiler/with-statement-works.js" // parser doesn't allow `with` statement
],
"overrides": [
{
"files": ["test/**", "examples/**", "packages/bun-internal/test/runners/**"],

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.11",
"version": "1.2.10",
"workspaces": [
"./packages/bun-types"
],
@@ -31,7 +31,6 @@
},
"scripts": {
"build": "bun run build:debug",
"watch": "bun zig build check --watch -fincremental --prominent-compile-errors",
"bd": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
@@ -52,7 +51,7 @@
"fmt": "bun run prettier",
"fmt:cpp": "bun run clang-format",
"fmt:zig": "bun run zig-format",
"lint": "bunx oxlint --config=oxlint.json --format=github src/js",
"lint": "oxlint --config oxlint.json",
"lint:fix": "oxlint --config oxlint.json --fix",
"test": "node scripts/runner.node.mjs --exec-path ./build/debug/bun-debug",
"test:release": "node scripts/runner.node.mjs --exec-path ./build/release/bun",

packages/bun-polyfills/.gitignore vendored Normal file
View File

@@ -0,0 +1,172 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# Misc
_*
.old
.vscode
!build

View File

@@ -0,0 +1,9 @@
# Bun APIs Polyfills
Polyfills for Bun's JavaScript runtime APIs for use in environments outside of Bun, such as Node.js or the browser¹.
¹ **Note:** The current priority is Node.js, browser support will vary per polyfill.
## Usage
This is currently a work in progress and is not ready for general use.

View File

@@ -0,0 +1,181 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "bun-polyfills",
"dependencies": {
"bun-wasm": "link:bun-wasm",
"chalk": "^5.3.0",
"js-md4": "^0.3.2",
"open-editor": "^4.0.0",
"supports-color": "^9.4.0",
"which": "^3.0.1",
},
"devDependencies": {
"@types/node": "^20.4.5",
"@types/which": "^3.0.0",
"bun-types": "^0.7.0",
"copyfiles": "^2.4.1",
},
"peerDependencies": {
"typescript": "^5.0.0",
},
"optionalPeers": [
"typescript",
],
},
},
"packages": {
"@types/node": ["@types/node@20.4.5", "", {}, "sha512-rt40Nk13II9JwQBdeYqmbn2Q6IVTA5uPhvSO+JVqdXw/6/4glI6oR9ezty/A9Hg5u7JH4OmYmuQ+XvjKm0Datg=="],
"@types/which": ["@types/which@3.0.0", "", {}, "sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ=="],
"ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
"ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
"brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="],
"bun-types": ["bun-types@0.7.0", "", {}, "sha512-jXFiYtwSUQtD/Y3LHRWeWNwhFaUYvcO96zI7y3gSPgTq+ozxXpuTGDxABLdIKmFc672Q7Qp/OgrfJFEjg4Mnkg=="],
"bun-wasm": ["bun-wasm@link:bun-wasm", {}],
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
"cliui": ["cliui@7.0.4", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^7.0.0" } }, "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ=="],
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
"copyfiles": ["copyfiles@2.4.1", "", { "dependencies": { "glob": "^7.0.5", "minimatch": "^3.0.3", "mkdirp": "^1.0.4", "noms": "0.0.0", "through2": "^2.0.1", "untildify": "^4.0.0", "yargs": "^16.1.0" }, "bin": { "copyfiles": "copyfiles", "copyup": "copyfiles" } }, "sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg=="],
"core-util-is": ["core-util-is@1.0.3", "", {}, "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="],
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
"define-lazy-prop": ["define-lazy-prop@2.0.0", "", {}, "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og=="],
"emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
"env-editor": ["env-editor@1.1.0", "", {}, "sha512-7AXskzN6T7Q9TFcKAGJprUbpQa4i1VsAetO9rdBqbGMGlragTziBgWt4pVYJMBWHQlLoX0buy6WFikzPH4Qjpw=="],
"escalade": ["escalade@3.1.1", "", {}, "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="],
"execa": ["execa@5.1.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" } }, "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg=="],
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],
"get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],
"get-stream": ["get-stream@6.0.1", "", {}, "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg=="],
"glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="],
"human-signals": ["human-signals@2.1.0", "", {}, "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw=="],
"inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="],
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
"is-docker": ["is-docker@2.2.1", "", { "bin": { "is-docker": "cli.js" } }, "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ=="],
"is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
"is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
"is-wsl": ["is-wsl@2.2.0", "", { "dependencies": { "is-docker": "^2.0.0" } }, "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww=="],
"isarray": ["isarray@0.0.1", "", {}, "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ=="],
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
"js-md4": ["js-md4@0.3.2", "", {}, "sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA=="],
"line-column-path": ["line-column-path@3.0.0", "", { "dependencies": { "type-fest": "^2.0.0" } }, "sha512-Atocnm7Wr9nuvAn97yEPQa3pcQI5eLQGBz+m6iTb+CVw+IOzYB9MrYK7jI7BfC9ISnT4Fu0eiwhAScV//rp4Hw=="],
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
"mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="],
"minimatch": ["minimatch@3.0.8", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q=="],
"mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],
"noms": ["noms@0.0.0", "", { "dependencies": { "inherits": "^2.0.1", "readable-stream": "~1.0.31" } }, "sha512-lNDU9VJaOPxUmXcLb+HQFeUgQQPtMI24Gt6hgfuMHRJgMRHMF/qZ4HJD3GDru4sSw9IQl2jPjAYnQrdIeLbwow=="],
"npm-run-path": ["npm-run-path@4.0.1", "", { "dependencies": { "path-key": "^3.0.0" } }, "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw=="],
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
"onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg=="],
"open": ["open@8.4.2", "", { "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", "is-wsl": "^2.2.0" } }, "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ=="],
"open-editor": ["open-editor@4.0.0", "", { "dependencies": { "env-editor": "^1.0.0", "execa": "^5.1.1", "line-column-path": "^3.0.0", "open": "^8.4.0" } }, "sha512-5mKZ98iFdkivozt5XTCOspoKbL3wtYu6oOoVxfWQ0qUX9NYsK8pdkHE7VUHXr+CwyC3nf6mV0S5FPsMS65innw=="],
"path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="],
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
"process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="],
"readable-stream": ["readable-stream@1.0.34", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", "isarray": "0.0.1", "string_decoder": "~0.10.x" } }, "sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg=="],
"require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
"safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
"signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="],
"string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
"string_decoder": ["string_decoder@0.10.31", "", {}, "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ=="],
"strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
"strip-final-newline": ["strip-final-newline@2.0.0", "", {}, "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA=="],
"supports-color": ["supports-color@9.4.0", "", {}, "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw=="],
"through2": ["through2@2.0.5", "", { "dependencies": { "readable-stream": "~2.3.6", "xtend": "~4.0.1" } }, "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ=="],
"type-fest": ["type-fest@2.19.0", "", {}, "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA=="],
"untildify": ["untildify@4.0.0", "", {}, "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
"wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],
"y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
"yargs": ["yargs@16.2.0", "", { "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" } }, "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw=="],
"yargs-parser": ["yargs-parser@20.2.9", "", {}, "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w=="],
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"glob/minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
"through2/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
"through2/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="],
"through2/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
}
}

View File

@@ -0,0 +1,110 @@
// @ts-check
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
const { instance } = /** @type {ZighashInstance} */(
await WebAssembly.instantiate(
fs.readFileSync(path.join(path.dirname(fileURLToPath(import.meta.url)), 'zighash.wasm')),
{
env: {
/** @param {any} x */
print(x) { console.log(x); },
},
}
)
);
const exports = instance.exports;
const mem = exports.memory;
const memview = {
get u8() { return new Uint8Array(mem.buffer); },
get u16() { return new Uint16Array(mem.buffer); },
get u32() { return new Uint32Array(mem.buffer); },
get u64() { return new BigUint64Array(mem.buffer); },
get i8() { return new Int8Array(mem.buffer); },
get i16() { return new Int16Array(mem.buffer); },
get i32() { return new Int32Array(mem.buffer); },
get i64() { return new BigInt64Array(mem.buffer); },
get f32() { return new Float32Array(mem.buffer); },
get f64() { return new Float64Array(mem.buffer); },
};
const nullptr = { ptr: -1, size: 0 };
const encoder = new TextEncoder();
const allocBuffer = (
/** @type {ArrayBufferView | ArrayBuffer | SharedArrayBuffer} */ buf,
/** @type {boolean=} */ nullTerminate = false,
) => {
const size = buf.byteLength + +nullTerminate;
if (size === 0) return nullptr;
const ptr = exports.alloc(size);
if (ptr === -1) throw new Error('WASM memory allocation failed');
const u8heap = memview.u8;
u8heap.set(new Uint8Array(ArrayBuffer.isView(buf) ? buf.buffer : buf), ptr);
if (nullTerminate) u8heap[ptr + buf.byteLength] = 0;
return { ptr, size };
};
const allocString = (
/** @type {string} */ str,
/** @type {boolean=} */ nullTerminate = true,
) => {
const strbuf = encoder.encode(str);
return allocBuffer(strbuf, nullTerminate);
};
/** @type {JSSeededHash64Function} */
export function wyhash(input = '', seed = 0n) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return BigInt.asUintN(64, exports.wyhash(ptr, size, seed));
}
/** @type {JSHash32Function} */
export function adler32(input = '') {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return exports.adler32(ptr, size) >>> 0;
}
/** @type {JSHash32Function} */
export function crc32(input = '') {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return exports.crc32(ptr, size) >>> 0;
}
/** @type {JSHash32Function} */
export function cityhash32(input = '') {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return exports.cityhash32(ptr, size) >>> 0;
}
/** @type {JSSeededHash64Function} */
export function cityhash64(input = '', seed = 0n) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return BigInt.asUintN(64, exports.cityhash64(ptr, size, seed));
}
/** @type {JSSeededHash32Function} */
export function xxhash32(input = '', seed = 0) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return exports.xxhash32(ptr, size, seed)
}
/** @type {JSSeededHash64Function} */
export function xxhash64(input = '', seed = 0n) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return BigInt.asUintN(64, exports.xxhash64(ptr, size, seed));
}
/** @type {JSSeededHash64Function} */
export function xxhash3(input = '', seed = 0n) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return BigInt.asUintN(64, exports.xxhash3(ptr, size, seed));
}
/** @type {JSSeededHash32Function} */
export function murmur32v3(input = '', seed = 0) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return exports.murmur32v3(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
}
/** @type {JSSeededHash32Function} */
export function murmur32v2(input = '', seed = 0) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return exports.murmur32v2(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
}
/** @type {JSSeededHash64Function} */
export function murmur64v2(input = '', seed = 0n) {
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
return BigInt.asUintN(64, exports.murmur64v2(ptr, size, seed));
}
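A short usage sketch of these wrappers (values illustrative):
```ts
import { wyhash, crc32, xxhash3 } from "./index.mjs";

console.log(wyhash("hello", 0n)); // 64-bit hash as an unsigned BigInt
console.log(crc32(new Uint8Array([1, 2, 3]))); // unsigned 32-bit number
console.log(xxhash3("hello")); // seed defaults to 0n
```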

View File

@@ -0,0 +1,10 @@
{
"private": true,
"type": "module",
"name": "zighash-wasm",
"module": "index.mjs",
"scripts": {
"build": "bun run clean && zig build-lib src/main.zig --name zighash -target wasm32-freestanding -dynamic -rdynamic -OReleaseSmall",
"clean": "rm -f *.wasm *.o"
}
}

View File

@@ -0,0 +1,73 @@
const std = @import("std");
extern fn print(*const u8) void;
comptime {
std.debug.assert(@alignOf(u16) >= 2);
std.debug.assert(@alignOf(u32) >= 4);
std.debug.assert(@alignOf(u64) >= 8);
std.debug.assert(@alignOf(i16) >= 2);
std.debug.assert(@alignOf(i32) >= 4);
std.debug.assert(@alignOf(i64) >= 8);
}
export fn alloc(size: u32) [*]const u8 {
const slice = std.heap.wasm_allocator.alloc(u8, size) catch @panic("wasm failed to allocate memory");
return slice.ptr;
}
export fn wyhash(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.Wyhash.hash(seed, input);
}
export fn adler32(input_ptr: [*]const u8, input_size: u32) u32 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.Adler32.hash(input);
}
export fn crc32(input_ptr: [*]const u8, input_size: u32) u32 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.Crc32.hash(input);
}
export fn cityhash32(input_ptr: [*]const u8, input_size: u32) u32 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.CityHash32.hash(input);
}
export fn cityhash64(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.CityHash64.hashWithSeed(input, seed);
}
export fn xxhash32(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.XxHash32.hash(seed, input);
}
export fn xxhash64(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.XxHash64.hash(seed, input);
}
export fn xxhash3(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.XxHash3.hash(seed, input);
}
export fn murmur32v3(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.Murmur3_32.hashWithSeed(input, seed);
}
export fn murmur32v2(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.Murmur2_32.hashWithSeed(input, seed);
}
export fn murmur64v2(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
const input: []const u8 = input_ptr[0..input_size];
defer std.heap.wasm_allocator.free(input);
return std.hash.Murmur2_64.hashWithSeed(input, seed);
}
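
Since each export frees the buffer it receives, the JS side follows an allocate-write-call pattern for every hash. A rough sketch (instantiation elided; the `exports` shape follows the `ZighashInstance` type in the next file):

```ts
// Sketch only: `exports` mirrors the declarations in index.d.ts below.
declare const exports: {
    memory: WebAssembly.Memory;
    alloc(size: number): number;
    crc32(ptr: number, size: number): number;
};

const bytes = new TextEncoder().encode('hello');
const ptr = exports.alloc(bytes.byteLength);              // Zig allocates
new Uint8Array(exports.memory.buffer).set(bytes, ptr);    // JS copies input in
const hash = exports.crc32(ptr, bytes.byteLength) >>> 0;  // Zig hashes, then frees
```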

View File

@@ -0,0 +1,28 @@
type WasmHash32Function = (input_ptr: number, input_size: number) => number;
type WasmHash64Function = (input_ptr: number, input_size: number) => bigint;
type WasmSeededHash32Function = (input_ptr: number, input_size: number, seed: number) => number;
type WasmSeededHash64Function = (input_ptr: number, input_size: number, seed: bigint) => bigint;
type JSHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
type JSHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => bigint;
type JSSeededHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
type JSSeededHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
type ZighashInstance = WebAssembly.WebAssemblyInstantiatedSource & {
instance: {
exports: {
memory: WebAssembly.Memory,
alloc(size: number): number,
wyhash: WasmSeededHash64Function,
adler32: WasmHash32Function,
crc32: WasmHash32Function,
cityhash32: WasmHash32Function,
cityhash64: WasmSeededHash64Function,
xxhash32: WasmSeededHash32Function,
xxhash64: WasmSeededHash64Function,
xxhash3: WasmSeededHash64Function,
murmur32v3: WasmSeededHash32Function,
murmur32v2: WasmSeededHash32Function,
murmur64v2: WasmSeededHash64Function,
};
};
}

Binary file not shown.

View File

@@ -0,0 +1,30 @@
{
"type": "module",
"name": "bun-polyfills",
"module": "src/index.ts",
"devDependencies": {
"@types/node": "^20.4.5",
"@types/which": "^3.0.0",
"bun-types": "^0.7.0",
"copyfiles": "^2.4.1"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"scripts": {
"node": "node --enable-source-maps --import ./dist/src/repl.js",
"clean": "rm -rf dist",
"preprocess": "bun tools/updateversions.ts",
"build": "bun run clean && bun run preprocess && bunx tsc && bunx copyfiles \"./lib/**/*.wasm\" dist",
"build/wasm": "bun run build/zighash",
"build/zighash": "cd lib/zighash && bun run build && cd ../.."
},
"dependencies": {
"bun-wasm": "link:bun-wasm",
"chalk": "^5.3.0",
"js-md4": "^0.3.2",
"open-editor": "^4.0.0",
"supports-color": "^9.4.0",
"which": "^3.0.1"
}
}

View File

@@ -0,0 +1,31 @@
//? Implements: Red colored console.error from Bun
//if (Bun.enableANSIColors) {
// const RED = '\x1B[31m' as const;
// const RESET = '\x1B[0m' as const;
// const consoleError = console.error;
// console.error = (...args) => {
// if (typeof args[0] === 'string') args[0] = RED + args[0];
// consoleError(...args, RESET);
// };
//}
//? Implements: for await (const line of console) { ... }
console[Symbol.asyncIterator] = async function* () {
// once() keeps each iteration to a single stdin listener instead of stacking a new 'data' handler on every loop.
while (true) yield await new Promise(resolve => {
process.stdin.once('data', (data: Buffer | string) => {
const str = data.toString('utf-8').replaceAll(/[\r\n]+/g, '');
resolve(str);
});
});
} satisfies Console[typeof Symbol.asyncIterator];
//? Implements: Bun-exclusive console function
console.write = ((...data) => {
const str = data.map(val => {
if (val instanceof ArrayBuffer) val = new TextDecoder('utf-8').decode(val);
// Pass the view itself so its byteOffset/byteLength are respected.
else if (typeof val === 'object') val = new TextDecoder('utf-8').decode(val as ArrayBufferView);
return val;
}).join('');
process.stdout.write(str);
return new TextEncoder().encode(str).byteLength;
}) satisfies Console['write'];
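
Together, the two polyfills above enable the Bun-style REPL idiom, roughly:

```ts
// Usage sketch: line-by-line stdin iteration plus byte-counting writes.
for await (const line of console) {
    if (line === 'exit') break;
    const written = console.write('you typed: ', line, '\n');
    console.log(`(${written} bytes)`);
}
```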

View File

@@ -0,0 +1,32 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { createRequire } from 'node:module';
// Without an ESM loader, this polyfill is impossible to apply automatically,
// due to the per-module nature of import.meta. In order to use this polyfill,
// you must import it in every module that uses import.meta, and call it with
// the import.meta object as the argument. When the polyfills are integrated
// with bun build, this could be done automatically by the build process at
// the top of every module file bundled.
export default function polyfillImportMeta(metaIn: ImportMeta) {
const require2 = createRequire(metaIn.url);
const metapath = fileURLToPath(metaIn.url);
const meta: ImportMeta = {
url: metaIn.url,
main: metapath === process.argv[1],
path: metapath,
dir: path.dirname(metapath),
file: path.basename(metapath),
require: require2,
resolve: metaIn.resolve,
resolveSync(id: string, parent?: string) {
return require2.resolve(id, {
paths: typeof parent === 'string' ? [
path.resolve(parent.startsWith('file://') ? fileURLToPath(parent) : parent, '..')
] : undefined,
});
},
};
Object.assign(metaIn, meta);
}
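
A sketch of the per-module call described in the comment above (paths are illustrative):

```ts
import polyfillImportMeta from './global/importmeta.js'; // illustrative path
polyfillImportMeta(import.meta);

console.log(import.meta.dir, import.meta.file, import.meta.main);
const resolved = import.meta.resolveSync('chalk'); // resolves like require.resolve
```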

View File

@@ -0,0 +1,45 @@
import { version } from '../modules/bun.js';
import './console.js';
import './process.js';
import os from 'node:os';
//? NodeJS Blob doesn't implement Blob.json(), so we need to polyfill it.
Blob.prototype.json = async function json<T>(this: Blob): Promise<T> {
try {
return JSON.parse(await this.text()) as T;
} catch (err) {
Error.captureStackTrace(err as Error, json);
throw err;
}
};
//? navigator global object polyfill
Reflect.set(globalThis, 'navigator', {
userAgent: `Bun/${version}`,
hardwareConcurrency: os.cpus().length,
});
//? method only available in Bun
// this isn't quite accurate, but it shouldn't break anything and is currently here just for matching bun and node types
const ReadableStreamDefaultReaderPrototype = Object.getPrototypeOf(new ReadableStream().getReader());
Reflect.set(
ReadableStreamDefaultReaderPrototype, 'readMany',
function readMany(this: ReadableStreamDefaultReader): Promise<ReadableStreamDefaultReadManyResult<any>> {
return new Promise((resolve, reject) => {
const result: ReadableStreamDefaultReadManyResult<any> = {
value: [],
size: 0,
done: true
};
this.read().then(({ done, value }) => {
if (done) resolve(result);
else {
result.value.push(value);
result.size = value.length;
result.done = false;
resolve(result);
}
}, reject);
});
}
);
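
A sketch of what the `readMany` polyfill above yields; unlike Bun's native version, it drains at most one chunk per call:

```ts
const reader = new Blob(['abc']).stream().getReader() as ReadableStreamDefaultReader<Uint8Array>;
const { value, size, done } = await reader.readMany();
// value: [Uint8Array(3)], size: 3, done: false
```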

View File

@@ -0,0 +1,19 @@
if (typeof process === 'object' && process !== null) {
// process polyfills (node-only)
Reflect.set(process, 'isBun', 1 satisfies Process['isBun']);
Reflect.set(process, 'browser', false satisfies Process['browser']);
const NULL_VERSION = '0'.repeat(39) + '1';
process.versions.bun = '0.7.1' satisfies Process['versions'][string]; // TODO: This can probably be fetched from somewhere in the repo
process.versions.webkit = NULL_VERSION satisfies Process['versions'][string];
process.versions.mimalloc = NULL_VERSION satisfies Process['versions'][string];
process.versions.libarchive = NULL_VERSION satisfies Process['versions'][string];
process.versions.picohttpparser = NULL_VERSION satisfies Process['versions'][string];
process.versions.boringssl = NULL_VERSION satisfies Process['versions'][string];
process.versions.zig = '0.10.0' satisfies Process['versions'][string];
Reflect.set(process, 'revision', NULL_VERSION satisfies Process['revision']);
// Doesn't work on Windows sadly
//Object.defineProperty(process, 'execPath', { value: path.resolve(root, 'cli.js') });
}

View File

@@ -0,0 +1,3 @@
export * from './modules/bun.js';
export * as default from './modules/bun.js';
import './global/index.js';

View File

@@ -0,0 +1,518 @@
import type {
BunPlugin, PluginConstraints, PluginBuilder, OnLoadCallback, OnResolveCallback, HeapSnapshot,
EditorOptions, SpawnOptions, Subprocess, SyncSubprocess, FileBlob as BunFileBlob, ArrayBufferView, Hash
} from 'bun';
import { TextDecoderStream } from 'node:stream/web';
import { NotImplementedError, type SystemError } from '../utils/errors.js';
import { streamToBuffer, isArrayBufferView, isFileBlob, isOptions } from '../utils/misc.js';
import dnsPolyfill from './bun/dns.js';
import { FileSink } from './bun/filesink.js';
import {
bunHash, bunHashProto,
MD4 as MD4Polyfill, MD5 as MD5Polyfill,
SHA1 as SHA1Polyfill, SHA224 as SHA224Polyfill,
SHA256 as SHA256Polyfill, SHA384 as SHA384Polyfill,
SHA512 as SHA512Polyfill, SHA512_256 as SHA512_256Polyfill
} from './bun/hashes.js';
import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js';
import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';
import TranspilerImpl from './bun/transpiler.js';
import fs from 'node:fs';
import v8 from 'node:v8';
import path from 'node:path';
import util from 'node:util';
import zlib from 'node:zlib';
import streams from 'node:stream';
import workers from 'node:worker_threads';
import chp, { type ChildProcess, type StdioOptions, type SpawnSyncReturns } from 'node:child_process';
import { fileURLToPath as fileURLToPathNode, pathToFileURL as pathToFileURLNode } from 'node:url';
import npm_which from 'which';
import openEditor from 'open-editor';
export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satisfies typeof Bun.main;
//? These are automatically updated on build by tools/updateversions.ts, do not edit manually.
export const version = '0.7.4' satisfies typeof Bun.version;
export const revision = '56816a3ec845a4b9fc40ade34dbe5c0033433d51' satisfies typeof Bun.revision;
export const gc = (globalThis.gc ? (() => (globalThis.gc!(), process.memoryUsage().heapUsed)) : (() => {
const err = new Error('[bun-polyfills] Garbage collection polyfills are only available when Node.js is ran with the --expose-gc flag.');
Error.captureStackTrace(err, gc);
throw err;
})) satisfies typeof Bun.gc;
//getter(bun, 'cwd', proc.cwd); //! Can't named export a getter
export const origin = '' satisfies typeof Bun.origin;
// @ts-expect-error ---
export const stdin = new NodeJSStreamFileBlob(process.stdin) satisfies typeof Bun.stdin;
// @ts-expect-error ---
export const stdout = new NodeJSStreamFileBlob(process.stdout) satisfies typeof Bun.stdout;
// @ts-expect-error ---
export const stderr = new NodeJSStreamFileBlob(process.stderr) satisfies typeof Bun.stderr;
export const argv = [process.argv0, ...process.execArgv, ...process.argv.slice(1)] satisfies typeof Bun.argv;
export const env = process.env satisfies typeof Bun.env;
Object.setPrototypeOf(env, {
toJSON(this: typeof env) { return { ...this }; }
});
// @ts-expect-error supports-color types are unbelievably bad
export const enableANSIColors = (await import('supports-color')).createSupportsColor().hasBasic satisfies typeof Bun.enableANSIColors;
export const hash = bunHash satisfies typeof Bun.hash;
Object.setPrototypeOf(hash, bunHashProto satisfies Hash);
export const unsafe = {
gcAggressionLevel: () => 0, //! no-op
arrayBufferToString: (buf) => new TextDecoder().decode(buf),
segfault: () => {
const segfault = new Error();
segfault.name = 'SegfaultTest';
segfault.message = '';
console.error(segfault);
process.exit(1);
}
} satisfies typeof Bun['unsafe'];
export const Transpiler = TranspilerImpl satisfies typeof Bun.Transpiler;
export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1;
export const MD5 = MD5Polyfill satisfies typeof Bun.MD5;
export const MD4 = MD4Polyfill satisfies typeof Bun.MD4;
export const SHA224 = SHA224Polyfill satisfies typeof Bun.SHA224;
export const SHA512 = SHA512Polyfill satisfies typeof Bun.SHA512;
export const SHA384 = SHA384Polyfill satisfies typeof Bun.SHA384;
export const SHA256 = SHA256Polyfill satisfies typeof Bun.SHA256;
export const SHA512_256 = SHA512_256Polyfill satisfies typeof Bun.SHA512_256;
export const indexOfLine = ((data, offset) => {
if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) data = new Uint8Array(data);
if (data instanceof DataView || !(data instanceof Uint8Array)) data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); // honor the view's offset and length
return data.indexOf(10, offset);
}) satisfies typeof Bun.indexOfLine;
const peek_ = function peek(promise: Parameters<typeof Bun.peek>[0]) {
throw new NotImplementedError('Bun.peek', peek);
};
peek_.status = (promise => {
return util.inspect(promise).includes('<pending>') ? 'pending'
: util.inspect(promise).includes('<rejected>') ? 'rejected' : 'fulfilled';
}) satisfies typeof Bun.peek.status;
export const peek = peek_ satisfies typeof Bun.peek;
export const sleep = (ms => {
return new Promise(r => setTimeout(r, ms instanceof Date ? ms.valueOf() - Date.now() : ms));
}) satisfies typeof Bun.sleep;
export const sleepSync = (ms => {
if (ms < 0) throw new TypeError('argument to sleepSync must not be negative');
Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
}) satisfies typeof Bun.sleepSync;
//? This is not 1:1 matching, but no one should be relying on the exact output of this function anyway.
//? To quote Node's inspect itself: "The output of util.inspect() may change at any time and should not be depended upon programmatically."
//? Of course in Node's case some didn't listen and relied on the output of util.inspect() anyway, but hopefully this won't happen with this one.
export const inspect = ((arg: any): string => util.inspect(arg, {
breakLength: Infinity,
colors: false,
compact: true,
customInspect: false,
depth: Infinity,
getters: true,
maxArrayLength: Infinity,
maxStringLength: Infinity,
showHidden: false,
showProxy: false,
sorted: false
})) satisfies typeof Bun.inspect;
export const resolveSync = ((id: string, parent: string) => import.meta.resolveSync(id, parent)) satisfies typeof Bun.resolveSync;
export const resolve = (async (id: string, parent: string) => import.meta.resolve!(id, parent)) satisfies typeof Bun.resolve;
//? Yes, this is faster than new Uint8Array(Buffer.allocUnsafe(size).buffer) by about 2.5x in Node.js
export const allocUnsafe = ((size: number) => new Uint8Array(size)) satisfies typeof Bun.allocUnsafe;
export const generateHeapSnapshot = (async (): Promise<HeapSnapshot> => {
process.emitWarning('The polyfill for Bun.generateHeapSnapshot is asynchronous, unlike the original which is synchronous.', {
type: 'BunPolyfillWarning',
code: 'BUN_POLYFILLS_ASYNC_GENERATE_HEAP_SNAPSHOT',
detail: 'This is due to v8.getHeapSnapshot() returning a stream in Node.js. This is not a bug, but a limitation of the polyfill.'
});
const raw = (await streamToBuffer(v8.getHeapSnapshot())).toString('utf8');
const json = JSON.parse(raw) as V8HeapSnapshot;
return {
version: 2,
type: 'Inspector',
nodes: json.nodes,
edges: json.edges,
edgeTypes: json.snapshot.meta.edge_types.flat(),
edgeNames: json.snapshot.meta.edge_fields.flat(),
nodeClassNames: json.snapshot.meta.node_types.flat(),
};
// @ts-expect-error Refer to the above emitWarning call
}) satisfies typeof Bun.generateHeapSnapshot;
//! This is a no-op in Node.js, as there is no way to shrink the V8 heap from JS as far as I know.
export const shrink = (() => void 0) satisfies typeof Bun.shrink;
export const openInEditor = ((file: string, opts?: EditorOptions) => {
const target = [{ file: path.resolve(process.cwd(), file), line: opts?.line, column: opts?.column }] as const;
if (opts?.editor) openEditor(target, opts);
else openEditor(target, { editor: process.env.TERM_PROGRAM ?? process.env.VISUAL ?? process.env.EDITOR ?? 'vscode' });
}) satisfies typeof Bun.openInEditor;
export const serve = (() => { throw new NotImplementedError('Bun.serve', serve); }) satisfies typeof Bun.serve;
export const file = ((path: string | URL | Uint8Array | ArrayBufferLike | number, options?: BlobPropertyBag): BunFileBlob => {
if (typeof path === 'object') throw new NotImplementedError('Bun.file with typed array', file);
return new FileBlob(path, options);
}) satisfies typeof Bun.file;
export const write = (async (dest: BunFileBlob | PathLike, input: string | Blob | TypedArray | ArrayBufferLike | BlobPart[] | Response | BunFileBlob): ReturnType<typeof Bun.write> => {
if (!isFileBlob(dest)) {
let fd: number;
if (dest instanceof ArrayBuffer || dest instanceof SharedArrayBuffer) fd = fs.openSync(Buffer.from(dest), 'w');
// bun-types thought it'd be funny to make their own URL definition which doesn't match the standard URL definition...
else if (typeof dest === 'string' || dest instanceof URL) fd = fs.openSync(dest as import('url').URL, 'w');
else fd = fs.openSync(Buffer.from(dest.buffer), 'w');
if (input instanceof Response || input instanceof Blob) {
const data = await input.text();
return new Promise((resolve, reject) => {
fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
});
}
if (Array.isArray(input)) {
const data = await new Blob(input).text();
return new Promise((resolve, reject) => {
fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
});
}
return new Promise((resolve, reject) => {
if (typeof input === 'string') return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
if (input instanceof Uint8Array) return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
if (input instanceof ArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
if (input instanceof SharedArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
});
} else {
const writer = dest.writer();
if (Array.isArray(input)) input = new Blob(input);
if (input instanceof Blob || input instanceof Response) return writer.write(await input.arrayBuffer());
if (input instanceof ArrayBuffer || input instanceof SharedArrayBuffer || ArrayBuffer.isView(input)) return writer.write(input);
if (typeof input === 'string') return writer.write(input);
else return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
}
}) satisfies typeof Bun.write;
export const sha = SHA512_256.hash satisfies typeof Bun.sha;
export const nanoseconds = (() => Math.trunc(performance.now() * 1000000)) satisfies typeof Bun.nanoseconds;
//? This just prints out some debug stuff in console, and as the name implies no one should be using it.
//? But, just in case someone does, we'll make it a no-op function so at least the program doesn't crash trying to run the function.
export const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = (() => {
console.warn('DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump called.');
}) satisfies unknown; /* undocumented */
export const gzipSync = zlib.gzipSync satisfies typeof Bun.gzipSync;
export const deflateSync = zlib.deflateSync satisfies typeof Bun.deflateSync;
export const gunzipSync = zlib.gunzipSync satisfies typeof Bun.gunzipSync;
export const inflateSync = zlib.inflateSync satisfies typeof Bun.inflateSync;
export const which = ((cmd: string, options) => {
const opts: npm_which.Options = { all: false, nothrow: true };
if (options?.PATH) opts.path = options.PATH;
const result = npm_which.sync(cmd, opts) as string | null;
if (!result || !options?.cwd) return result;
if (path.normalize(result).includes(path.normalize(options.cwd))) return result;
else return null;
}) satisfies typeof Bun.which;
export const spawn = ((...args) => {
let cmd: string;
let argv: string[];
let opts: SpawnOptions.OptionsObject;
if (args[0] instanceof Array) {
cmd = args[0][0];
argv = args[0].slice(1);
opts = isOptions(args[1]) ? args[1] : {};
} else {
cmd = args[0].cmd[0];
argv = args[0].cmd.slice(1);
opts = args[0];
Reflect.deleteProperty(opts, 'cmd');
}
let stdio: StdioOptions = [];
opts.stdio ??= [undefined, undefined, undefined];
if (opts.stdin) opts.stdio[0] = opts.stdin;
if (opts.stdout) opts.stdio[1] = opts.stdout;
if (opts.stderr) opts.stdio[2] = opts.stderr;
for (let i = 1; i < 3; i++) { // this intentionally skips stdin
let std = opts.stdio[i];
if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
else stdio[i] = std;
}
let stdinSrc: typeof opts.stdio[0] = null;
if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
stdinSrc = opts.stdio[0];
stdio[0] = 'pipe';
}
const subp = chp.spawn(cmd, argv, {
cwd: opts.cwd ?? process.cwd(),
// why is this set to (string | number) on env values...
env: { ...(opts.env as Record<string, string> ?? process.env) },
stdio
}) as unknown as Subprocess;
const subpAsNode = subp as unknown as ChildProcess;
const stdstreams = [subpAsNode.stdin, subpAsNode.stdout, subpAsNode.stderr] as const;
if (subpAsNode.stdout) {
const rstream = streams.Readable.toWeb(subpAsNode.stdout) as ReadableStream;
Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails, it's already closed */ });
return this;
});
(<Mutable<Subprocess>>subp).stdout = rstream;
}
if (subpAsNode.stderr) {
const rstream = streams.Readable.toWeb(subpAsNode.stderr) as ReadableStream;
Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails, it's already closed */ });
return this;
});
(<Mutable<Subprocess>>subp).stderr = rstream;
}
let internalStdinStream: streams.Writable;
if (subpAsNode.stdin) {
const wstream = subpAsNode.stdin;
Reflect.set(wstream, 'destroy', function (this: NodeJS.WritableStream, err?: Error) {
void this.end(); /* if it fails, it's already closed */
return this;
});
internalStdinStream = wstream;
(<Mutable<Subprocess>>subp).stdin = new FileSink(wstream);
}
Object.defineProperty(subp, 'readable', { get(this: Subprocess) { return this.stdout; } });
Object.defineProperty(subp, 'exited', {
value: new Promise((resolve, reject) => {
subpAsNode.once('exit', (code) => {
stdstreams[0]?.destroy();
stdstreams[1]?.destroy();
stdstreams[2]?.destroy();
subp.kill();
subp.unref();
subpAsNode.disconnect?.();
subpAsNode.removeAllListeners();
resolve(code);
});
})
});
if (stdinSrc) subpAsNode.once('spawn', () => {
const stdinWeb = streams.Writable.toWeb(internalStdinStream);
if (isArrayBufferView(stdinSrc)) stdinSrc = new Blob([stdinSrc]);
if (stdinSrc instanceof Blob) void stdinSrc.stream().pipeTo(stdinWeb);
else if (stdinSrc instanceof Response || stdinSrc instanceof Request) void stdinSrc.body!.pipeTo(stdinWeb);
else if (typeof stdinSrc === 'number') void fs.createReadStream('', { fd: stdinSrc }).pipe(internalStdinStream);
else void stdinSrc;
});
// change the error stack to point to the spawn() call instead of internal Node.js callback stuff
const here = new Error('§__PLACEHOLDER__§');
Error.captureStackTrace(here, spawn);
if (!subpAsNode.pid) return subpAsNode.once('error', (err: SystemError) => {
err.message = (err.syscall ?? `spawn ${err.path ?? ''}`) + ' ' + (err.code ?? String(err.errno ?? ''));
err.stack = here.stack!.replace('§__PLACEHOLDER__§', err.message);
throw err;
}) as unknown as Subprocess;
return subp;
}) satisfies typeof Bun.spawn;
export const spawnSync = ((...args): SyncSubprocess => {
let cmd: string;
let argv: string[];
let opts: SpawnOptions.OptionsObject;
if (args[0] instanceof Array) {
cmd = args[0][0];
argv = args[0].slice(1);
opts = isOptions(args[1]) ? args[1] : {};
} else {
cmd = args[0].cmd[0];
argv = args[0].cmd.slice(1);
opts = args[0];
Reflect.deleteProperty(opts, 'cmd');
}
let stdio: StdioOptions = [];
opts.stdio ??= [undefined, undefined, undefined];
if (opts.stdin) opts.stdio[0] = opts.stdin;
if (opts.stdout) opts.stdio[1] = opts.stdout;
if (opts.stderr) opts.stdio[2] = opts.stderr;
for (let i = 1; i < 3; i++) { // this intentionally skips stdin
let std = opts.stdio[i];
if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
else stdio[i] = std;
}
let input: ArrayBufferView | string | undefined;
if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
stdio[0] = null; // will be overridden by chp.spawnSync "input" option
//! Due to the fully async nature of Blobs, Responses and Requests,
//! we can't synchronously get the data out of them here in userland.
if (opts.stdio[0] instanceof Blob) throw new NotImplementedError('Bun.spawnSync({ stdin: <Blob> })', spawnSync);
else if (opts.stdio[0] instanceof Response || opts.stdio[0] instanceof Request) throw new NotImplementedError('Bun.spawnSync({ stdin: <Response|Request> })', spawnSync);
else if (typeof opts.stdio[0] === 'number') input = fs.readFileSync(opts.stdio[0]);
else input = opts.stdio[0] as ArrayBufferView;
}
const subp = chp.spawnSync(cmd, argv, {
cwd: opts.cwd ?? process.cwd(),
env: { ...(opts.env as Record<string, string> ?? process.env) },
stdio, input
}) as unknown as SyncSubprocess;
const subpAsNode = subp as unknown as SpawnSyncReturns<Buffer>;
if (subpAsNode.error) throw subpAsNode.error;
subp.exitCode = subpAsNode.status ?? NaN; //! not sure what Bun would return here (child killed by signal)
subp.success = subp.exitCode === 0;
return subp;
}) satisfies typeof Bun.spawnSync;
export const escapeHTML = ((input) => {
const str = String(input);
let out = '';
for (let i = 0; i < str.length; i++) {
const char = str[i];
switch (char) {
case '"': out += '&quot;'; break;
case "'": out += '&#x27;'; break;
case '&': out += '&amp;'; break;
case '<': out += '&lt;'; break;
case '>': out += '&gt;'; break;
default: out += char;
}
}
return out;
}) satisfies typeof Bun.escapeHTML;
export const readableStreamToArrayBuffer = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): ArrayBuffer | Promise<ArrayBuffer> => {
return (async () => {
const sink = new ArrayBufferSink();
const reader = stream.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
sink.write(value);
}
return sink.end() as ArrayBuffer;
})();
}) satisfies typeof Bun.readableStreamToArrayBuffer;
export const readableStreamToBytes = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): Uint8Array | Promise<Uint8Array> => {
return (async () => {
const sink = new ArrayBufferSink();
sink.start({ asUint8Array: true });
const reader = stream.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
sink.write(value);
}
return sink.end() as Uint8Array;
})();
}) satisfies typeof Bun.readableStreamToBytes;
export const readableStreamToText = (async (stream: ReadableStream<ArrayBufferView | ArrayBuffer>) => {
let result = '';
const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
while (true) {
const { done, value } = await reader.read();
//! for some reason "done" isn't being set to true here, so without the value check this would loop forever
if (done || !value?.length) break;
result += value;
}
return result;
}) satisfies typeof Bun.readableStreamToText;
export const readableStreamToBlob = (async (stream: ReadableStream<any>) => {
const parts = await readableStreamToArray(stream);
return new Blob(parts as BlobPart[]);
}) satisfies typeof Bun.readableStreamToBlob;
export const readableStreamToArray = (async <T = unknown>(stream: ReadableStream<T>) => {
const array = new Array<T>();
const reader = stream.getReader();
while (true) {
const { done, value } = await reader.read();
if (done || !value || !(<any>value)?.length) break;
array.push(value as unknown as T);
}
return array;
}) satisfies typeof Bun.readableStreamToArray;
export const readableStreamToJSON = (async <T = unknown>(stream: ReadableStream<Uint8Array>) => {
const text = await readableStreamToText(stream);
try {
return JSON.parse(text) as T;
} catch (err) {
Error.captureStackTrace(err as Error, readableStreamToJSON);
throw err;
}
}) satisfies typeof Bun.readableStreamToJSON;
export const concatArrayBuffers = ((buffers, maxLength = Infinity, asUint8Array = false) => {
let size = 0;
for (const chunk of buffers) size += chunk.byteLength;
size = Math.min(size, maxLength);
const buffer = new ArrayBuffer(size);
const view = new Uint8Array(buffer);
let offset = 0;
for (const chunk of buffers) {
if (offset >= size) break;
const src = chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer ? new Uint8Array(chunk) : new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
// Clamp the final chunk so a maxLength truncation can't overflow the target view.
view.set(src.subarray(0, Math.min(src.byteLength, size - offset)), offset);
offset += chunk.byteLength;
}
if (asUint8Array) return view;
return buffer;
}) satisfies typeof Bun.concatArrayBuffers;
export const ArrayBufferSink = ArrayBufferSinkPolyfill satisfies typeof Bun.ArrayBufferSink;
export const pathToFileURL = pathToFileURLNode satisfies typeof Bun.pathToFileURL;
export const fileURLToPath = fileURLToPathNode satisfies typeof Bun.fileURLToPath;
export const dns = dnsPolyfill satisfies typeof Bun.dns;
export const isMainThread = workers.isMainThread satisfies typeof Bun.isMainThread;
//! It may be possible to implement plugins with Node ESM loaders, but it would take some effort and have some caveats.
//! For now, we'll simply make all calls to Bun.plugin no-op, such that manual implementation of an external ESM loader is possible,
//! but without needing to strip out all Bun.plugin calls from the source code for running on Node.
const dummyPluginBuilder: PluginBuilder = ({
onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void {
return; // stubbed
},
onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void {
return; // stubbed
},
config: { plugins: [], entrypoints: [] },
}) satisfies PluginBuilder;
const bunPlugin = <T extends BunPlugin>(options: T) => options?.setup?.(dummyPluginBuilder) as ReturnType<T['setup']>;
bunPlugin.clearAll = () => void 0;
export const plugin = bunPlugin satisfies typeof Bun.plugin;
/*void plugin({
name: 'test',
target: 'bun',
setup(builder) {
if (builder.target !== 'bun') return;
builder.onResolve({ namespace: 'sample', filter: /.+/ }, args => {
args.importer;
if (args.path === 'foo') return { namespace: 'redirect', path: 'bar' };
else return;
});
builder.onLoad({ namespace: 'sample', filter: /.+/ }, args => {
args.path;
return { loader: 'object', exports: { foo: 'bar' }, contents: 'void 0;' };
});
}
});*/
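
A consumer-side sketch exercising a few of the exports above (paths and commands are illustrative):

```ts
import { spawn, which, write, readableStreamToText } from './modules/bun.js';

const proc = spawn(['echo', 'hi'], { stdout: 'pipe' });
console.log(await readableStreamToText(proc.stdout as ReadableStream));
await proc.exited;

await write('/tmp/out.txt', 'hello'); // resolves to bytes written
console.log(which('node'));           // absolute path or null
```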

View File

@@ -0,0 +1,67 @@
type BunArrayBufferSink = InstanceType<typeof Bun.ArrayBufferSink>;
export class ArrayBufferSink implements BunArrayBufferSink {
#started: boolean = true;
#closed: boolean = false;
#offset: number = 0;
#stream: boolean = false;
#asUint8: boolean = false;
#buffer: Buffer = Buffer.allocUnsafe(8192);
get sinkId(): number { return 0; } //? undocumented, seems to always return 0
#ASSERT_NOT_CLOSED(caller: AnyFunction): void {
if (!this.#closed) return;
const err = new TypeError('Expected Sink');
Error.captureStackTrace(err, caller);
throw err;
}
start({ asUint8Array = false, highWaterMark = 8192, stream = false }: Parameters<BunArrayBufferSink['start']>[0] = {}): void {
this.#ASSERT_NOT_CLOSED(this.start);
this.#started = true;
this.#offset = 0;
this.#stream = stream;
this.#asUint8 = asUint8Array;
if (highWaterMark !== this.#buffer.byteLength) this.#buffer = Buffer.allocUnsafe(highWaterMark);
}
write(data: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
this.#ASSERT_NOT_CLOSED(this.write);
if (typeof data === 'string') data = new TextEncoder().encode(data);
const writedata = (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) ? new Uint8Array(data) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
// it's bad API design not to throw an error here, but it's what Bun does
if (!this.#started) return writedata.byteLength;
if (this.#offset + writedata.byteLength > this.#buffer.byteLength) {
const newLength = Math.ceil((this.#offset + writedata.byteLength) / 1024) * 1024;
const newBuffer = Buffer.allocUnsafe(newLength);
newBuffer.set(this.#buffer);
this.#buffer = newBuffer;
}
this.#buffer.set(writedata, this.#offset);
this.#offset += writedata.byteLength;
return writedata.byteLength;
}
flush(): number | Uint8Array | ArrayBuffer {
this.#ASSERT_NOT_CLOSED(this.flush);
if (!this.#stream) return 0; //! brokenly seems to always return 0 and do nothing
const flushed = new Uint8Array(this.#offset);
flushed.set(this.#buffer.subarray(0, this.#offset)); // faster than Buffer.copy or Uint8Array.slice
this.#offset = 0;
return this.#asUint8 ? flushed : flushed.buffer as ArrayBuffer;
}
end(): Uint8Array | ArrayBuffer {
this.#ASSERT_NOT_CLOSED(this.end);
const stream = this.#stream;
this.#stream = true; // force flush() to return the data
const buffer = this.flush() as Uint8Array | ArrayBuffer;
this.#stream = stream;
this.#started = false;
return buffer;
}
close(): void { this.#closed = true; } //? undocumented
}
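
Usage sketch for the sink above (illustrative import path):

```ts
import { ArrayBufferSink } from './bun/arraybuffersink.js';

const sink = new ArrayBufferSink();
sink.start({ asUint8Array: true });
sink.write('hello ');
sink.write(new TextEncoder().encode('world'));
const out = sink.end() as Uint8Array; // bytes of "hello world"
```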

View File

@@ -0,0 +1,21 @@
import dns from 'node:dns';
const dnsObj: typeof Bun.dns = {
async lookup(hostname, options) {
// Normalize Bun's family option to a Node-style numeric family.
let family = 0;
if (options?.family) {
if (options.family === 'IPv4') family = 4;
else if (options.family === 'IPv6') family = 6;
else if (options.family === 'any') family = 0;
else family = options.family;
}
//? resolveAny is used instead of dns.lookup because it exposes TTLs; a side effect is that options.flags (getaddrinfo hints) can't be applied here.
const records = ((await dns.promises.resolveAny(hostname))
.filter(r => r.type === 'A' || r.type === 'AAAA') as (dns.AnyARecord | dns.AnyAaaaRecord)[])
.filter(r => family === 0 || (family === 4 ? r.type === 'A' : r.type === 'AAAA'))
.map(r => ({ address: r.address, family: r.type === 'A' ? 4 as const : 6 as const, ttl: r.ttl }));
return records;
},
// This has more properties but they're not documented on bun-types yet, oh well.
};
export default dnsObj;
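
Usage sketch (actual addresses and TTLs depend on the resolver; the path is illustrative):

```ts
import dns from './bun/dns.js';

const records = await dns.lookup('example.com', { family: 'IPv4' });
// e.g. [{ address: '93.184.215.14', family: 4, ttl: 3600 }]
```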

View File

@@ -0,0 +1,195 @@
import fs from 'node:fs';
import tty from 'node:tty';
import streams from 'node:stream';
import { ReadableStream as NodeWebReadableStream } from 'node:stream/web';
import { FileSink } from './filesink.js';
import { SystemError } from '../../utils/errors.js';
import type { FileBlob as BunFileBlob, FileSink as BunFileSink } from 'bun';
type NodeJSStream = streams.Readable | streams.Writable;
function NodeJSReadableStreamToBlob(stream: NodeJS.ReadableStream | NodeJS.ReadWriteStream, iostream: boolean = false, type?: string): Promise<Blob> {
if (stream.isPaused()) stream.resume();
return new Promise((resolve, reject) => {
const chunks: any[] = [];
const dataHandler = (chunk: any) => { chunks.push(chunk); if (iostream) end(); };
const end = () => {
resolve(new Blob(chunks, type != null ? { type } : undefined));
stream.off('data', dataHandler);
stream.off('end', end);
stream.pause();
};
stream.once('data', dataHandler).once('end', end);
//.once('error', reject); Bun waits to error on actual operations on the stream, therefore so will we.
});
}
export const NodeJSStreamFileBlob = class FileBlob extends Blob {
constructor(source: NodeJSStream, slice: [number?, number?] = [undefined, undefined], type = 'application/octet-stream') {
super(undefined, { type });
Reflect.deleteProperty(this, 'size');
if (source === process.stdout || source === process.stdin || source === process.stderr) {
this.#iostream = true;
}
this.#readable = source instanceof streams.Readable && !(source instanceof tty.WriteStream);
this.#source = source;
this.#slice = slice;
this.#size = Infinity;
}
readonly #iostream: boolean = false;
readonly #readable: boolean;
readonly #source: NodeJSStream;
readonly #slice: [number?, number?];
#size: number;
slice(begin?: number, end?: number, contentType?: string): Blob;
slice(begin?: number, contentType?: string): Blob;
slice(contentType?: string): Blob;
slice(beginOrType?: number | string, endOrType?: number | string, contentType: string = this.type): Blob {
if (typeof beginOrType === 'string') return new FileBlob(this.#source, this.#slice, beginOrType);
if (typeof endOrType === 'string') return new FileBlob(this.#source, [beginOrType, undefined], endOrType);
return new FileBlob(this.#source, [beginOrType, endOrType], contentType);
}
override stream(): ReadableStream<Uint8Array> {
// This makes no sense but Bun does it so we will too
if (!this.#readable) return new ReadableStream();
return streams.Readable.toWeb(this.#source as streams.Readable);
}
#blobStackFn: AnyFunction = this.#getBlob;
async #getBlob(): Promise<Blob> {
if (!this.#readable) {
const err = new SystemError(-1, 'read');
Error.captureStackTrace(err, this.#blobStackFn);
throw err;
}
const blob = (await NodeJSReadableStreamToBlob(this.#source as streams.Readable, this.#iostream)).slice(...this.#slice);
this.#size = blob.size;
return blob;
}
override async text(): Promise<string> {
if (this.#blobStackFn !== this.json) this.#blobStackFn = this.text;
return (await this.#getBlob()).text();
}
override async arrayBuffer(): Promise<ArrayBuffer> {
this.#blobStackFn = this.arrayBuffer;
return (await this.#getBlob()).arrayBuffer();
}
override async json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> {
this.#blobStackFn = this.json;
return JSON.parse(await this.text()) as Promise<TJSONReturnType>;
}
override get size(): number { return this.#size; }
override set size(_) { return; }
};
export class FileBlob extends Blob implements BunFileBlob {
constructor(fdOrPath: number | string, opts: BlobPropertyBag = {}) {
opts.type ??= 'application/octet-stream'; // TODO: Get MIME type from file extension
super(undefined, opts);
Reflect.deleteProperty(this, 'size');
if (Reflect.get(opts, '__data')) this.#data = Reflect.get(opts, '__data') as Blob;
const slice = Reflect.get(opts, '__slice') as [number?, number?] | undefined;
if (slice) {
slice[0] &&= slice[0] | 0; // int cast
slice[1] &&= slice[1] | 0; // int cast
this.#slice = slice;
slice[0] ??= 0;
if (typeof slice[1] === 'undefined') {
if (slice[0] < 0) this.#sliceSize = -slice[0];
}
else if (slice[0] < 0 && slice[1] < 0) this.#sliceSize = -(slice[0] - slice[1]);
else if (slice[0] >= 0 && slice[1] >= 0) this.#sliceSize = slice[1] - slice[0];
}
if (typeof fdOrPath === 'string') try {
this.#fd = fs.openSync(fdOrPath, 'r+');
} catch (err) {
this.#error = err as SystemError;
}
else {
this.#fd = fdOrPath;
this.#error = Reflect.get(opts, '__error') as SystemError | undefined;
}
if (!this.#error) {
const rstream = fs.createReadStream('', { fd: this.#fd, start: this.#slice[0], end: this.#slice[1] });
this.#readable = streams.Readable.toWeb(rstream);
}
}
readonly #readable?: NodeWebReadableStream;
readonly #error?: SystemError;
readonly #slice: [number?, number?] = [];
readonly #sliceSize: number = 0;
readonly #fd: number = NaN;
#data?: Blob;
#read() {
if (this.#error) throw this.#error;
const read = fs.readFileSync(this.#fd);
this.#data = new Blob([read.subarray(...this.#slice)], { type: this.type });
}
//! Bun 0.2 seems to return undefined for this; that might not be accurate, or it may be broken on Bun's side
get readable(): ReadableStream<any> {
if (this.#error) throw this.#error;
return this.#readable! as ReadableStream;
}
get lastModified(): number {
if (this.#error) throw this.#error;
return fs.fstatSync(this.#fd).mtimeMs;
}
async exists(): Promise<boolean> {
return !this.#error;
}
writer(): BunFileSink {
if (this.#error) throw this.#error;
return new FileSink(this.#fd);
}
// TODO: what's contentType?
override slice(begin?: number | string, end?: number | string, contentType?: string): FileBlob {
if (typeof begin === 'string') {
contentType = begin;
begin = undefined;
}
if (typeof end === 'string') {
contentType = end;
end = undefined;
}
return new FileBlob(this.#fd, {
__error: this.#error,
__slice: [begin, end],
__data: this.#data?.slice(begin, end),
} as BlobPropertyBag);
}
override arrayBuffer(): Promise<ArrayBuffer> {
if (!this.#data) this.#read();
return new Blob([this.#data ?? '']).arrayBuffer();
}
override text(): Promise<string> {
if (!this.#data) this.#read();
return new Blob([this.#data ?? '']).text();
}
override json(): Promise<any>;
override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> | Promise<any> {
if (!this.#data) this.#read();
return new Blob([this.#data ?? '']).json();
}
override stream(): NodeJS.ReadableStream;
override stream(): ReadableStream<Uint8Array>;
override stream(): ReadableStream<Uint8Array> | NodeJS.ReadableStream {
if (!this.#data) this.#read();
return new Blob([this.#data ?? '']).stream();
}
override get size(): number {
return this.#data?.size ?? (this.#sliceSize || 0);
}
}
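
Usage sketch for the two blob flavors above (paths illustrative):

```ts
import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';

const blob = new FileBlob('/tmp/data.json');
if (await blob.exists()) console.log(await blob.json());
console.log(blob.slice(0, 16).size); // sliced view over the same fd

const stdinBlob = new NodeJSStreamFileBlob(process.stdin);
console.log(await stdinBlob.text()); // resolves after one stdin chunk
```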

View File

@@ -0,0 +1,87 @@
import fs from 'node:fs';
import { SystemError } from '../../utils/errors.js';
import type { FileSink as BunFileSink } from 'bun';
export class FileSink implements BunFileSink {
constructor(fdOrPathOrStream: number | string | NodeJS.WritableStream) {
if (typeof fdOrPathOrStream === 'string') try {
this.#fd = fs.openSync(fdOrPathOrStream, 'a+');
fs.ftruncateSync(this.#fd, 0);
} catch (err) {
throw err as SystemError;
}
else if (typeof fdOrPathOrStream === 'number') {
this.#fd = fdOrPathOrStream; // hope this fd is writable
fs.ftruncateSync(this.#fd, 0);
}
else {
this.#stream = fdOrPathOrStream;
}
}
#fd: number = NaN;
#stream: NodeJS.WritableStream | undefined;
#closed: boolean = false;
#writtenSinceFlush: number = 0;
#totalWritten: number = 0;
start(options?: { highWaterMark?: number | undefined; } | undefined): void {
return; // TODO
}
ref(): void {
return; // TODO
}
unref(): void {
return; // TODO
}
write(chunk: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
if (this.#closed) {
return typeof chunk === 'string' ? chunk.length : chunk.byteLength;
}
if (this.#stream) {
let data;
if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) data = new Uint8Array(chunk);
else if (!(chunk instanceof Uint8Array) && typeof chunk !== 'string') data = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); // honor the view's offset/length
else data = chunk;
this.#stream.write(data);
const written = typeof data === 'string' ? data.length : data.byteLength;
this.#totalWritten += written;
return written;
}
if (typeof chunk === 'string') {
fs.appendFileSync(this.#fd, chunk, 'utf8');
this.#writtenSinceFlush += chunk.length;
return chunk.length;
}
if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) fs.appendFileSync(this.#fd, new Uint8Array(chunk));
else fs.appendFileSync(this.#fd, new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength)); // honor the view's offset/length
this.#writtenSinceFlush += chunk.byteLength;
return chunk.byteLength;
}
//! flushing after writing to a closed FileSink segfaults in Bun but I don't see the need to implement that behavior
flush(): number | Promise<number> {
if (this.#closed) return 0;
// no-op because this is a synchronous implementation
const written = this.#writtenSinceFlush;
this.#writtenSinceFlush = 0;
return written;
}
//! not sure what to do with this error
end(error?: Error): number | Promise<number> {
if (this.#closed) return this.#totalWritten;
const flushed = this.flush();
if (this.#stream) {
this.#stream.end();
this.#closed = true;
return flushed;
}
this.#totalWritten = fs.fstatSync(this.#fd).size;
fs.closeSync(this.#fd);
this.#closed = true;
return flushed;
}
}
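
Usage sketch (path illustrative; note the constructor truncates an existing file):

```ts
import { FileSink } from './bun/filesink.js';

const sink = new FileSink('/tmp/log.txt');
sink.write('hello ');
sink.write(new TextEncoder().encode('world\n'));
console.log(sink.flush()); // bytes written since the last flush
sink.end();
```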

View File

@@ -0,0 +1,188 @@
import type { CryptoHashInterface, DigestEncoding, Hash } from 'bun';
import nodecrypto from 'node:crypto';
import os from 'node:os';
import md4, { Md4 } from 'js-md4';
import { wyhash, adler32, crc32, cityhash32, cityhash64, xxhash32, xxhash64, xxhash3, murmur32v3, murmur64v2, murmur32v2 } from '../../../lib/zighash/index.mjs';
export const bunHash = ((data, seed = 0): bigint => wyhash(data, BigInt(seed))) as typeof Bun.hash;
export const bunHashProto: Hash = {
wyhash(data, seed = 0n) { return wyhash(data, seed); },
adler32(data) { return adler32(data); },
crc32(data) { return crc32(data); },
cityHash32(data) { return cityhash32(data); },
cityHash64(data, seed = 0n) { return cityhash64(data, seed); },
xxHash32(data, seed = 0) { return xxhash32(data, seed); },
xxHash64(data, seed = 0n) { return xxhash64(data, seed); },
xxHash3(data, seed = 0n) { return xxhash3(data, seed); },
murmur32v3(data, seed = 0) { return murmur32v3(data, seed); },
murmur32v2(data, seed = 0) { return murmur32v2(data, seed); },
murmur64v2(data, seed = 0n) { return murmur64v2(data, seed); },
};
type HashImpl = {
digest(): Buffer;
digest(encoding: nodecrypto.BinaryToTextEncoding): string;
update(data: nodecrypto.BinaryLike): HashImpl;
update(data: string, inputEncoding: nodecrypto.Encoding): HashImpl;
};
abstract class BaseHash<T> implements CryptoHashInterface<T> {
readonly #hash: HashImpl | null;
constructor(algorithm: string | HashImpl) {
if (typeof algorithm === 'string') this.#hash = nodecrypto.createHash(algorithm);
// If no preset algorithm is given, expect the subclass to fully implement its own.
else this.#hash = algorithm;
}
update(data: StringOrBuffer) {
if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) this.#hash!.update(new Uint8Array(data));
else this.#hash!.update(data);
return this as unknown as T; // is there any good way to do this without asserting?
}
digest(encoding: DigestEncoding): string;
digest(hashInto?: TypedArray): TypedArray;
digest(encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
if (typeof encodingOrHashInto === 'string') {
const encoded = this.#hash!.digest(encodingOrHashInto);
// you'd think node would throw an error if the encoding is invalid, but nope!
// instead it silently returns as if you passed no encoding and gives a Buffer...
if (Buffer.isBuffer(encoded)) throw new TypeError(`Unknown encoding: "${encodingOrHashInto}"`);
else return encoded;
}
const digested = this.#hash!.digest();
if (encodingOrHashInto === undefined) return new Uint8Array(digested.buffer, digested.byteOffset, digested.byteLength);
if (encodingOrHashInto.byteLength < this.byteLength) throw new TypeError(`TypedArray must be at least ${this.byteLength} bytes`);
if (encodingOrHashInto instanceof BigInt64Array || encodingOrHashInto instanceof BigUint64Array) {
// avoid checking endianness for every loop iteration
const endianAwareInsert = os.endianness() === 'LE'
? (arr: string[], j: number, num: string) => arr[7 - j] = num
: (arr: string[], j: number, num: string) => arr[j] = num;
for (let i = 0; i < digested.byteLength; i += 8) {
const bigintStrArr = ['', '', '', '', '', '', '', ''];
for (let j = 0; j < 8; j++) {
const byte = digested[i + j];
if (byte === undefined) break;
endianAwareInsert(bigintStrArr, j, byte.toString(16).padStart(2, '0'));
}
encodingOrHashInto[i / 8] = BigInt(`0x${bigintStrArr.join('')}`);
}
} else {
const HashIntoTypedArray = encodingOrHashInto.constructor as TypedArrayConstructor;
// this will work as long as all hash classes have a byteLength that is a multiple of 4 bytes
encodingOrHashInto.set(new HashIntoTypedArray(digested.buffer, digested.byteOffset, digested.byteLength / HashIntoTypedArray.BYTES_PER_ELEMENT));
}
return encodingOrHashInto;
}
static hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { return ''; } // stub; each subclass overrides with a real implementation
static readonly byteLength: number;
abstract readonly byteLength: number;
}
export class SHA1 extends BaseHash<SHA1> {
constructor() { super('sha1'); }
static override readonly byteLength = 20;
override readonly byteLength = 20;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class MD4 extends BaseHash<MD4> {
constructor() { //! Not supported by nodecrypto
const hash = md4.create() as unknown as Omit<Md4, 'toString'> & { _update: Md4['update'] };
function digest(): Buffer;
function digest(encoding: nodecrypto.BinaryToTextEncoding): string;
function digest(encoding?: nodecrypto.BinaryToTextEncoding) {
const buf = Buffer.from(hash.arrayBuffer());
if (encoding) return buf.toString(encoding);
else return buf;
}
function update(data: nodecrypto.BinaryLike) {
if (typeof data === 'string') hash._update(data);
else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) hash._update(new Uint8Array(data));
else hash._update(new Uint8Array(data.buffer, data.byteOffset, data.byteLength)); // honor the view's offset/length
return hash as unknown as MD4HashImpl;
}
type MD4HashImpl = Omit<Md4, 'toString'> & { digest: typeof digest, update: typeof update };
// @ts-expect-error patches to reuse the BaseHash methods
hash.digest = digest; hash._update = hash.update; hash.update = update;
super(hash as unknown as MD4HashImpl);
}
static override readonly byteLength = 16;
override readonly byteLength = 16;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class MD5 extends BaseHash<MD5> {
constructor() { super('md5'); }
static override readonly byteLength = 16;
override readonly byteLength = 16;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class SHA224 extends BaseHash<SHA224> {
constructor() { super('sha224'); }
static override readonly byteLength = 28;
override readonly byteLength = 28;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class SHA512 extends BaseHash<SHA512> {
constructor() { super('sha512'); }
static override readonly byteLength = 64;
override readonly byteLength = 64;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class SHA384 extends BaseHash<SHA384> {
constructor() { super('sha384'); }
static override readonly byteLength = 48;
override readonly byteLength = 48;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class SHA256 extends BaseHash<SHA256> {
constructor() { super('sha256'); }
static override readonly byteLength = 32;
override readonly byteLength = 32;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
export class SHA512_256 extends BaseHash<SHA512_256> {
constructor() { super('sha512-256'); }
static override readonly byteLength = 32;
override readonly byteLength = 32;
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
const instance = new this(); instance.update(data);
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
}
}
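
Usage sketch for the classes above (import path illustrative):

```ts
import { SHA256, bunHash } from './bun/hashes.js';

console.log(SHA256.hash('hello', 'hex'));  // one-shot digest as a hex string

const h = new SHA256();
h.update('hel');
h.update('lo');
console.log(h.digest(new Uint8Array(32))); // incremental digest into a TypedArray

console.log(bunHash('hello'));             // wyhash-backed Bun.hash, returns bigint
```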

View File

@@ -0,0 +1,97 @@
import type { JavaScriptLoader, TranspilerOptions, Transpiler as BunTranspiler, Import } from 'bun';
import { transformSync, scan, init } from 'bun-wasm';
import { Message } from 'bun-wasm/schema';
import $ from 'chalk';
await init();
enum InternalImportKind {
'entry-point-run' = 1, // entry_point_run
'entry-point-build' = 2, // entry_point_build
'import-statement' = 3, // stmt
'require-call' = 4, // require
'dynamic-import' = 5, // dynamic
'require-resolve' = 6, // require_resolve
'import-rule' = 7, // at
'url-token' = 8, // url
'internal' = 9, // internal
}
export type ScanImportsEntry = {
kind: 'import-statement' | 'dynamic-import';
path: string;
};
export default class Transpiler implements BunTranspiler {
constructor(options?: TranspilerOptions) {
this.#options = options ?? {};
this.#rootFile = 'input.tsx'; // + (this.#options.loader ?? 'tsx');
//? ^ NOTE: with current bun-wasm builds, the loader option is ignored and hardcoded to tsx
}
#options: TranspilerOptions;
#rootFile: string;
#decoder?: TextDecoder;
#internallyCalled: boolean = false;
async transform(code: StringOrBuffer, loader: JavaScriptLoader): Promise<string> {
this.#internallyCalled = true;
return this.transformSync(code, loader);
}
transformSync(code: StringOrBuffer, ctx: object): string;
transformSync(code: StringOrBuffer, loader: JavaScriptLoader, ctx: object): string;
transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | undefined): string;
transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | object, ctx: object = {}): string {
if (!code) return ''; // wasm dies with empty string input
if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array');
if (typeof loader !== 'string') loader = this.#options.loader;
const result = transformSync(code, this.#rootFile, loader);
// status 1 = success, status 2 = error
if (result.status === 2) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.transform : this.transformSync);
this.#internallyCalled = false;
this.#decoder ??= new TextDecoder();
return this.#decoder.decode(result.files[0].data);
}
scan(code: StringOrBuffer): { exports: string[]; imports: Import[]; } {
if (!code) return { exports: [], imports: [] }; // wasm dies with empty string input
if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array');
const result = scan(code, this.#rootFile, this.#options.loader);
if (result.errors.length) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.scanImports : this.scan);
this.#internallyCalled = false;
result.imports.forEach(imp => (imp.kind as unknown) = InternalImportKind[imp.kind]);
return {
exports: result.exports,
imports: result.imports as unknown as Import[],
};
}
scanImports(code: StringOrBuffer): ScanImportsEntry[] {
this.#internallyCalled = true;
return this.scan(code).imports.filter(imp => imp.kind === 'import-statement' || imp.kind === 'dynamic-import') as ScanImportsEntry[];
}
}
function formatBuildErrors(buildErrors: Message[], caller: Transpiler[keyof Transpiler]): AggregateError {
const formatted = buildErrors.map(err => {
const loc = err.data.location;
const str = `${$.redBright('error')}${$.gray(':')} ${$.bold(err.data.text)}\n` +
(loc
? `${highlightErrorChar(loc.line_text, loc.offset)}\n` +
$.redBright.bold('^'.padStart(loc.column)) + '\n' +
`${$.bold(loc.file)}${$.gray(':')}${$.yellowBright(loc.line)}${$.gray(':')}${$.yellowBright(loc.column)} ${$.gray(loc.offset)}`
: ''
);
return { __proto__: Error.prototype, stack: str };
});
const aggregate = new AggregateError(formatted, `Input code has ${formatted.length} error${formatted.length === 1 ? '' : 's'}`);
Error.captureStackTrace(aggregate, caller);
aggregate.name = 'BuildError';
return aggregate;
}
function highlightErrorChar(str: string, at: number): string {
return str.slice(0, at) + $.red(str[at]) + str.slice(at + 1);
}
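// Usage sketch (hedged): how this polyfill is meant to be driven, mirroring Bun's
// `Transpiler` API. The import path and sample sources are illustrative only.
//
//   import Transpiler from './transpiler.js';
//   const transpiler = new Transpiler({ loader: 'ts' });
//   const js = transpiler.transformSync('const n: number = 1;', 'ts');
//   const { imports, exports } = transpiler.scan('export { x } from "./x.ts";');
//   const importPaths = transpiler.scanImports('import "./a.ts"; await import("./b.ts");');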

View File

@@ -0,0 +1,111 @@
import type jsc from 'bun:jsc';
import v8 from 'node:v8';
//import { setRandomSeed, getRandomSeed } from './mathrandom.js';
import { NotImplementedError, getCallSites } from '../utils/errors.js';
import { gc } from './bun.js';
const STUB = () => void 0;
function jscSerialize(value: any, options?: { binaryType: 'nodebuffer'; }): Buffer;
function jscSerialize(value: any, options?: { binaryType?: 'arraybuffer'; }): SharedArrayBuffer;
function jscSerialize(value: any, options?: { binaryType?: string }): Buffer | SharedArrayBuffer {
const serialized = v8.serialize(value);
if (options?.binaryType === 'nodebuffer') return serialized;
const sab = new SharedArrayBuffer(serialized.byteLength);
new Uint8Array(sab).set(serialized); // copy the serialized bytes into the shared buffer
return sab;
}
// TODO: Investigate ways of making these the actual JSC serialization format (probably Bun WASM)
// TODO: whilst this works for common use-cases like Node <-> Node it still does not make it
// TODO: possible for Node <-> Bun transfers of this kind of data, which might be interesting to have.
export const serialize = jscSerialize satisfies typeof jsc.serialize;
export const deserialize = (value => {
if (value instanceof ArrayBuffer || value instanceof SharedArrayBuffer) return v8.deserialize(Buffer.from(value));
else return v8.deserialize(value);
}) satisfies typeof jsc.deserialize;
export const setTimeZone = ((timeZone: string) => {
const resolvedTZ = Intl.DateTimeFormat(undefined, { timeZone }).resolvedOptions().timeZone;
return process.env.TZ = resolvedTZ;
}) satisfies typeof jsc.setTimeZone;
export const callerSourceOrigin = (() => {
const callsites: NodeJS.CallSite[] = getCallSites(2);
// This may be inaccurate with async code. Needs more testing.
let lastSeenURL = '';
for (const callsite of callsites) {
const sourceURL = callsite.getScriptNameOrSourceURL();
if (sourceURL.startsWith('file://')) lastSeenURL = sourceURL;
}
return lastSeenURL;
}) satisfies typeof jsc.callerSourceOrigin;
// TODO: Like with jsc.serialize/deserialize, these may be possible with Bun WASM.
export const jscDescribe = (() => { throw new NotImplementedError('jsc.jscDescribe', STUB); }) satisfies typeof jsc.jscDescribe;
export const jscDescribeArray = (() => { throw new NotImplementedError('jsc.jscDescribeArray', STUB); }) satisfies typeof jsc.jscDescribeArray;
// These are no longer documented but still exist.
export const describe = jscDescribe;
export const describeArray = jscDescribeArray;
// Node.js only provides a singular non-configurable global GC function, so we have to make do with that.
export const edenGC = gc satisfies typeof jsc.edenGC;
export const fullGC = gc satisfies typeof jsc.fullGC;
export const gcAndSweep = gc satisfies typeof jsc.gcAndSweep;
export const drainMicrotasks = STUB satisfies typeof jsc.drainMicrotasks; // no-op
export const releaseWeakRefs = STUB satisfies typeof jsc.releaseWeakRefs; // no-op
export const startSamplingProfiler = STUB satisfies typeof jsc.startSamplingProfiler; // no-op
//! likely broken but needs more testing
export const startRemoteDebugger = STUB satisfies typeof jsc.startRemoteDebugger; // no-op
//! this is a really poor polyfill but it's better than nothing
export const getProtectedObjects = (() => { return [globalThis]; }) satisfies typeof jsc.getProtectedObjects;
export const getRandomSeed = 0; // TODO
export const setRandomSeed = 0; // TODO
export const heapSize = (() => { return v8.getHeapStatistics().used_heap_size; }) satisfies typeof jsc.heapSize;
export const heapStats = (() => {
const stats = v8.getHeapStatistics();
return {
heapSize: stats.used_heap_size,
heapCapacity: stats.total_available_size,
extraMemorySize: stats.external_memory ?? 0,
objectCount: 1, // TODO: how to get this in node?
protectedObjectCount: getProtectedObjects().length,
globalObjectCount: 2, // TODO: this one is probably fine hardcoded but is there a way to get this in node?
protectedGlobalObjectCount: 1, // TODO: ^
objectTypeCounts: {}, //! can't really throw an error here, so just return an empty object (TODO: how to get this in node?)
protectedObjectTypeCounts: {} //! can't really throw an error here, so just return an empty object (TODO: how to get this in node?)
};
}) satisfies typeof jsc.heapStats;
//! doubtful anyone relies on the return of this for anything besides debugging
export const isRope = (() => false) satisfies typeof jsc.isRope;
export const memoryUsage = (() => {
const stats = v8.getHeapStatistics();
const resUse = process.resourceUsage();
return {
current: stats.malloced_memory,
peak: stats.peak_malloced_memory,
currentCommit: stats.malloced_memory,
peakCommit: stats.peak_malloced_memory,
pageFaults: resUse.minorPageFault + resUse.majorPageFault
};
}) satisfies typeof jsc.memoryUsage;
//! these are likely broken, seemingly always returning undefined which does not match the documented return types
export const noFTL = (() => { return void 0 as unknown as Function; }) satisfies typeof jsc.noFTL;
export const noOSRExitFuzzing = (() => { return void 0 as unknown as Function; }) satisfies typeof jsc.noOSRExitFuzzing;
//! likely broken, seems to always returns zero
export const totalCompileTime = (() => 0) satisfies typeof jsc.totalCompileTime;
//! likely broken, seem to always returns 0 if any arguments are passed, undefined otherwise
export const numberOfDFGCompiles = ((...args) => args.length ? 0 : void 0 as unknown as number) satisfies typeof jsc.numberOfDFGCompiles;
export const reoptimizationRetryCount = ((...args) => args.length ? 0 : void 0 as unknown as number) satisfies typeof jsc.reoptimizationRetryCount;
//! The following are very likely impossible to ever polyfill.
export const profile = (() => {
throw new NotImplementedError('jsc.profile is not polyfillable', STUB, true);
}) satisfies typeof jsc.profile;
export const optimizeNextInvocation = (() => {
throw new NotImplementedError('jsc.optimizeNextInvocation is not polyfillable', STUB, true);
}) satisfies typeof jsc.optimizeNextInvocation;
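// Usage sketch (hedged): exercising the polyfills above from Node. The import
// path is illustrative; in Bun itself these come from the real `bun:jsc`.
//
//   import * as jsc from './modules/jsc.js';
//   const buf = jsc.serialize({ hello: 'world' }, { binaryType: 'nodebuffer' });
//   console.log(jsc.deserialize(buf));         // { hello: 'world' }
//   console.log(jsc.heapStats().heapSize);     // approximated via v8.getHeapStatistics()
//   console.log(jsc.memoryUsage().pageFaults); // approximated via process.resourceUsage()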

View File

@@ -0,0 +1,30 @@
import bun from './index.js';
import * as jsc from './modules/jsc.js';
// This file serves two purposes:
// 1. It is the entry point for using the Bun global in the REPL. (--import this file)
// 2. It makes TypeScript check the full structural compatibility of the Bun global vs the polyfills object,
// which allows for the type assertion below to be used as a TODO list index.
globalThis.Bun = bun as typeof bun & {
// TODO: Missing polyfills
readableStreamToFormData: typeof import('bun').readableStreamToFormData;
deepEquals: typeof import('bun').deepEquals;
deepMatch: typeof import('bun').deepMatch;
build: typeof import('bun').build;
mmap: typeof import('bun').mmap;
connect: typeof import('bun').connect;
listen: typeof import('bun').listen;
password: typeof import('bun').password;
CryptoHashInterface: typeof import('bun').CryptoHashInterface;
CryptoHasher: typeof import('bun').CryptoHasher;
FileSystemRouter: typeof import('bun').FileSystemRouter;
//? Polyfilled but with broken types (See each one in ./src/modules/bun.ts for details)
generateHeapSnapshot: typeof import('bun').generateHeapSnapshot;
stdout: typeof import('bun').stdout;
stderr: typeof import('bun').stderr;
stdin: typeof import('bun').stdin;
};
Reflect.set(globalThis, 'jsc', jsc);
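// Usage sketch (hedged): preload this file so the globals exist in a Node REPL.
// The built output path is assumed; adjust to your dist layout.
//
//   node --import ./dist/repl.js
//   > Bun.version     // polyfilled version string
//   > jsc.heapStats()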

View File

@@ -0,0 +1,13 @@
type AnyFunction = (...args: any[]) => any;
type AnyClass = new (...args: any[]) => any;
type AnyCallable = AnyFunction | AnyClass;
type MapKeysType<T extends Map<unknown, unknown>> = T extends Map<infer K, infer V> ? K : never;
type MapValuesType<T extends Map<unknown, unknown>> = T extends Map<infer K, infer V> ? V : never;
type Mutable<T> = { -readonly [K in keyof T]: T[K] };
/** Excluding the BigInt typed arrays */
type TypedArrayConstructor =
| typeof Uint8Array | typeof Uint16Array | typeof Uint32Array | typeof Uint8ClampedArray
| typeof Int8Array | typeof Int16Array | typeof Int32Array | typeof Float32Array | typeof Float64Array;

View File

@@ -0,0 +1,72 @@
declare module 'js-md4' {
export type MD4Input = string | ArrayBuffer | Uint8Array | number[];
interface md4 {
/**
* # Broken, will throw an error.
* @deprecated Use {@link md4.hex} instead.
*/
(input: MD4Input): never;
/** Creates an `Md4` hasher instance. */
create(): Md4;
/** Shortcut for `md4.create().update(...)` */
update(message: MD4Input): Md4;
/** Hash `message` into a hex string. */
hex(message: MD4Input): string;
/** Hash `message` into an Array. */
array(message: MD4Input): number[];
/** Identical to {@link md4.array}. */
digest(message: MD4Input): number[];
/**
* Identical to {@link md4.arrayBuffer}.
* @deprecated Use {@link md4.arrayBuffer} instead.
*/
buffer(message: MD4Input): ArrayBuffer;
/** Hash `message` into an ArrayBuffer. */
arrayBuffer(message: MD4Input): ArrayBuffer;
}
export class Md4 {
private constructor();
private toString(): string;
private finalize(): void;
private hash(): void;
/**
* Append `message` to the internal hash source data.
* @returns A reference to `this` for chaining, or nothing if the instance has been finalized.
*/
update(message: MD4Input): this | void;
/** Hash into a hex string. Finalizes the hash. */
hex(): string;
/** Hash into an Array. Finalizes the hash. */
array(): number[];
/** Identical to {@link Md4.array}. */
digest(): number[];
/**
* Identical to {@link Md4.arrayBuffer}.
* @deprecated Use {@link Md4.arrayBuffer} instead.
*/
buffer(): ArrayBuffer;
/** Hash into an ArrayBuffer. Finalizes the hash. */
arrayBuffer(): ArrayBuffer;
private buffer8: Uint8Array;
private blocks: Uint32Array;
private bytes: number;
private start: number;
private h3: number;
private h2: number;
private h1: number;
private h0: number;
readonly hashed: boolean;
/** If true, `update()` operations will silently fail. */
readonly finalized: boolean;
readonly first: boolean;
private lastByteIndex?: number;
}
const md4: md4;
export default md4;
}
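// Usage sketch (hedged): assumes the `js-md4` package is installed; the shapes
// above mirror its documented runtime API.
//
//   import md4 from 'js-md4';
//   md4.hex('message digest');   // one-shot hex digest
//   const hasher = md4.create(); // incremental hashing
//   hasher.update('message ');
//   hasher.update('digest');
//   hasher.hex();                // same digest as above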

View File

@@ -0,0 +1,30 @@
// This file explicitly redefines global types used in order to enforce the correct types,
// regardless of the arbitrary order in which TSC/TSServer decide to load the type libraries in.
// Annoyingly, even this file can sometimes break, so if your types are inverted, try restarting TSServer.
import '@types/node';
declare module 'stream/web' {
interface ReadableStreamDefaultReader {
readMany(): Promise<ReadableStreamDefaultReadManyResult<any>>;
}
}
declare global {
var performance: typeof import('perf_hooks').performance;
// TODO: These should be contributed to @types/node upstream
namespace NodeJS {
interface CallSite {
getScriptNameOrSourceURL(): string;
getEnclosingColumnNumber(): number;
getEnclosingLineNumber(): number;
getPosition(): number;
getPromiseIndex(): number;
getScriptHash(): string;
isAsync(): boolean;
isPromiseAll(): boolean;
toString(): string;
}
}
}

View File

@@ -0,0 +1,24 @@
interface V8HeapSnapshot {
snapshot: {
meta: {
node_fields: string[],
node_types: [string[], ...string[]],
edge_fields: string[],
edge_types: [string[], ...string[]],
trace_function_info_fields: string[],
trace_node_fields: string[],
sample_fields: string[],
location_fields: string[]
},
node_count: number,
edge_count: number,
trace_function_count: number
},
nodes: number[],
edges: number[],
trace_function_infos: unknown[],
trace_tree: unknown[],
samples: unknown[],
locations: number[],
strings: string[]
}
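// Usage sketch (hedged): one way to obtain an object of this shape in Node,
// using v8.writeHeapSnapshot() and parsing the resulting JSON file.
//
//   import v8 from 'node:v8';
//   import fs from 'node:fs';
//   const file = v8.writeHeapSnapshot();
//   const snapshot: V8HeapSnapshot = JSON.parse(fs.readFileSync(file, 'utf8'));
//   console.log(snapshot.snapshot.node_count, snapshot.strings.length);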

View File

@@ -0,0 +1,230 @@
type PosixErrNo = MapKeysType<ReturnType<typeof getPosixSystemErrorMap>>;
type Win32ErrNo = MapKeysType<ReturnType<typeof getWin32SystemErrorMap>>;
export function getCallSites(sliceOff = 1) {
const originalPST = Error.prepareStackTrace;
Error.prepareStackTrace = (error, stack) => stack;
const { stack } = new Error();
Error.prepareStackTrace = originalPST; // restore before any throw so the hook never leaks
if (stack?.constructor.name !== 'Array') throw new Error('Failed to acquire structured JS stack trace');
return (stack as unknown as NodeJS.CallSite[]).slice(sliceOff);
}
export function getPosixSystemErrorMap() {
return new Map([
[ -7, [ 'E2BIG', 'argument list too long' ] ],
[ -13, [ 'EACCES', 'permission denied' ] ],
[ -98, [ 'EADDRINUSE', 'address already in use' ] ],
[ -99, [ 'EADDRNOTAVAIL', 'address not available' ] ],
[ -97, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
[ -11, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
[ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
[ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
[ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
[ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
[ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
[ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
[ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
[ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
[ -3007, [ 'EAI_NODATA', 'no address' ] ],
[ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
[ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
[ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
[ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
[ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
[ -114, [ 'EALREADY', 'connection already in progress' ] ],
[ -9, [ 'EBADF', 'bad file descriptor' ] ],
[ -16, [ 'EBUSY', 'resource busy or locked' ] ],
[ -125, [ 'ECANCELED', 'operation canceled' ] ],
[ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
[ -103, [ 'ECONNABORTED', 'software caused connection abort' ] ],
[ -111, [ 'ECONNREFUSED', 'connection refused' ] ],
[ -104, [ 'ECONNRESET', 'connection reset by peer' ] ],
[ -89, [ 'EDESTADDRREQ', 'destination address required' ] ],
[ -17, [ 'EEXIST', 'file already exists' ] ],
[ -14, [ 'EFAULT', 'bad address in system call argument' ] ],
[ -27, [ 'EFBIG', 'file too large' ] ],
[ -113, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
[ -4, [ 'EINTR', 'interrupted system call' ] ],
[ -22, [ 'EINVAL', 'invalid argument' ] ],
[ -5, [ 'EIO', 'i/o error' ] ],
[ -106, [ 'EISCONN', 'socket is already connected' ] ],
[ -21, [ 'EISDIR', 'illegal operation on a directory' ] ],
[ -40, [ 'ELOOP', 'too many symbolic links encountered' ] ],
[ -24, [ 'EMFILE', 'too many open files' ] ],
[ -90, [ 'EMSGSIZE', 'message too long' ] ],
[ -36, [ 'ENAMETOOLONG', 'name too long' ] ],
[ -100, [ 'ENETDOWN', 'network is down' ] ],
[ -101, [ 'ENETUNREACH', 'network is unreachable' ] ],
[ -23, [ 'ENFILE', 'file table overflow' ] ],
[ -105, [ 'ENOBUFS', 'no buffer space available' ] ],
[ -19, [ 'ENODEV', 'no such device' ] ],
[ -2, [ 'ENOENT', 'no such file or directory' ] ],
[ -12, [ 'ENOMEM', 'not enough memory' ] ],
[ -64, [ 'ENONET', 'machine is not on the network' ] ],
[ -92, [ 'ENOPROTOOPT', 'protocol not available' ] ],
[ -28, [ 'ENOSPC', 'no space left on device' ] ],
[ -38, [ 'ENOSYS', 'function not implemented' ] ],
[ -107, [ 'ENOTCONN', 'socket is not connected' ] ],
[ -20, [ 'ENOTDIR', 'not a directory' ] ],
[ -39, [ 'ENOTEMPTY', 'directory not empty' ] ],
[ -88, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
[ -95, [ 'ENOTSUP', 'operation not supported on socket' ] ],
[ -75, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
[ -1, [ 'EPERM', 'operation not permitted' ] ],
[ -32, [ 'EPIPE', 'broken pipe' ] ],
[ -71, [ 'EPROTO', 'protocol error' ] ],
[ -93, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
[ -91, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
[ -34, [ 'ERANGE', 'result too large' ] ],
[ -30, [ 'EROFS', 'read-only file system' ] ],
[ -108, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
[ -29, [ 'ESPIPE', 'invalid seek' ] ],
[ -3, [ 'ESRCH', 'no such process' ] ],
[ -110, [ 'ETIMEDOUT', 'connection timed out' ] ],
[ -26, [ 'ETXTBSY', 'text file is busy' ] ],
[ -18, [ 'EXDEV', 'cross-device link not permitted' ] ],
[ -4094, [ 'UNKNOWN', 'unknown error' ] ],
[ -4095, [ 'EOF', 'end of file' ] ],
[ -6, [ 'ENXIO', 'no such device or address' ] ],
[ -31, [ 'EMLINK', 'too many links' ] ],
[ -112, [ 'EHOSTDOWN', 'host is down' ] ],
[ -121, [ 'EREMOTEIO', 'remote I/O error' ] ],
[ -25, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
[ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
[ -84, [ 'EILSEQ', 'illegal byte sequence' ] ],
[ -94, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
] as const);
}
export function getWin32SystemErrorMap() {
return new Map([
[ -4093, [ 'E2BIG', 'argument list too long' ] ],
[ -4092, [ 'EACCES', 'permission denied' ] ],
[ -4091, [ 'EADDRINUSE', 'address already in use' ] ],
[ -4090, [ 'EADDRNOTAVAIL', 'address not available' ] ],
[ -4089, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
[ -4088, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
[ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
[ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
[ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
[ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
[ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
[ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
[ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
[ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
[ -3007, [ 'EAI_NODATA', 'no address' ] ],
[ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
[ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
[ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
[ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
[ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
[ -4084, [ 'EALREADY', 'connection already in progress' ] ],
[ -4083, [ 'EBADF', 'bad file descriptor' ] ],
[ -4082, [ 'EBUSY', 'resource busy or locked' ] ],
[ -4081, [ 'ECANCELED', 'operation canceled' ] ],
[ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
[ -4079, [ 'ECONNABORTED', 'software caused connection abort' ] ],
[ -4078, [ 'ECONNREFUSED', 'connection refused' ] ],
[ -4077, [ 'ECONNRESET', 'connection reset by peer' ] ],
[ -4076, [ 'EDESTADDRREQ', 'destination address required' ] ],
[ -4075, [ 'EEXIST', 'file already exists' ] ],
[ -4074, [ 'EFAULT', 'bad address in system call argument' ] ],
[ -4036, [ 'EFBIG', 'file too large' ] ],
[ -4073, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
[ -4072, [ 'EINTR', 'interrupted system call' ] ],
[ -4071, [ 'EINVAL', 'invalid argument' ] ],
[ -4070, [ 'EIO', 'i/o error' ] ],
[ -4069, [ 'EISCONN', 'socket is already connected' ] ],
[ -4068, [ 'EISDIR', 'illegal operation on a directory' ] ],
[ -4067, [ 'ELOOP', 'too many symbolic links encountered' ] ],
[ -4066, [ 'EMFILE', 'too many open files' ] ],
[ -4065, [ 'EMSGSIZE', 'message too long' ] ],
[ -4064, [ 'ENAMETOOLONG', 'name too long' ] ],
[ -4063, [ 'ENETDOWN', 'network is down' ] ],
[ -4062, [ 'ENETUNREACH', 'network is unreachable' ] ],
[ -4061, [ 'ENFILE', 'file table overflow' ] ],
[ -4060, [ 'ENOBUFS', 'no buffer space available' ] ],
[ -4059, [ 'ENODEV', 'no such device' ] ],
[ -4058, [ 'ENOENT', 'no such file or directory' ] ],
[ -4057, [ 'ENOMEM', 'not enough memory' ] ],
[ -4056, [ 'ENONET', 'machine is not on the network' ] ],
[ -4035, [ 'ENOPROTOOPT', 'protocol not available' ] ],
[ -4055, [ 'ENOSPC', 'no space left on device' ] ],
[ -4054, [ 'ENOSYS', 'function not implemented' ] ],
[ -4053, [ 'ENOTCONN', 'socket is not connected' ] ],
[ -4052, [ 'ENOTDIR', 'not a directory' ] ],
[ -4051, [ 'ENOTEMPTY', 'directory not empty' ] ],
[ -4050, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
[ -4049, [ 'ENOTSUP', 'operation not supported on socket' ] ],
[ -4026, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
[ -4048, [ 'EPERM', 'operation not permitted' ] ],
[ -4047, [ 'EPIPE', 'broken pipe' ] ],
[ -4046, [ 'EPROTO', 'protocol error' ] ],
[ -4045, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
[ -4044, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
[ -4034, [ 'ERANGE', 'result too large' ] ],
[ -4043, [ 'EROFS', 'read-only file system' ] ],
[ -4042, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
[ -4041, [ 'ESPIPE', 'invalid seek' ] ],
[ -4040, [ 'ESRCH', 'no such process' ] ],
[ -4039, [ 'ETIMEDOUT', 'connection timed out' ] ],
[ -4038, [ 'ETXTBSY', 'text file is busy' ] ],
[ -4037, [ 'EXDEV', 'cross-device link not permitted' ] ],
[ -4094, [ 'UNKNOWN', 'unknown error' ] ],
[ -4095, [ 'EOF', 'end of file' ] ],
[ -4033, [ 'ENXIO', 'no such device or address' ] ],
[ -4032, [ 'EMLINK', 'too many links' ] ],
[ -4031, [ 'EHOSTDOWN', 'host is down' ] ],
[ -4030, [ 'EREMOTEIO', 'remote I/O error' ] ],
[ -4029, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
[ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
[ -4027, [ 'EILSEQ', 'illegal byte sequence' ] ],
[ -4025, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
] as const);
}
export function getPosixToWin32SystemErrorMap() {
const posixEntries = [...getPosixSystemErrorMap().entries()];
const win32Entries = [...getWin32SystemErrorMap().entries()];
const map: Map<PosixErrNo, Win32ErrNo> = new Map();
posixEntries.forEach(([code, val]) => {
const found = win32Entries.find(([_, v]) => v[0] === val[0]);
if (!found) console.error('No Win32 equivalent for', val[0]);
else map.set(code, found[0]);
});
return map;
}
export function getPlatformSystemErrorFromPosix(posixErrNo: PosixErrNo) {
if (process.platform === 'win32') {
const win32errno = getPosixToWin32SystemErrorMap().get(posixErrNo)!;
return getWin32SystemErrorMap().get(win32errno);
} else {
return getPosixSystemErrorMap().get(posixErrNo);
}
}
export class SystemError extends Error {
constructor(errno: PosixErrNo, syscall?: string, errpath?: string) {
const [errname, errmsg] = getPlatformSystemErrorFromPosix(errno) ?? ['SystemError', 'Unknown system error'];
super(errmsg);
this.name = errname;
this.code = errname;
this.errno = errno;
if (syscall) this.syscall = syscall;
if (errpath) this.path = errpath;
}
errno?: number | undefined;
code?: string | undefined;
path?: string | undefined;
syscall?: string | undefined;
}
export class NotImplementedError extends Error {
constructor(thing: string, func: AnyCallable = NotImplementedError, overrideMsg: boolean = false) {
super(overrideMsg ? thing : `A polyfill for ${thing} is not yet implemented by bun-polyfills.`);
this.name = 'NotImplementedError';
Error.captureStackTrace(this, func);
}
}
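// Usage sketch (hedged): raising a Node-style system error from a POSIX errno;
// -2 maps to ENOENT in the table above. The syscall and path are illustrative.
//
//   const err = new SystemError(-2, 'open', '/tmp/missing.txt');
//   console.log(err.code, err.errno, err.message); // ENOENT -2 'no such file or directory'
//   const [site] = getCallSites();
//   console.log(site?.getScriptNameOrSourceURL());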

View File

@@ -0,0 +1,36 @@
import streams from 'node:stream';
import type { SpawnOptions, FileBlob } from 'bun';
export const getter = <T>(obj: T, key: string | symbol, get: () => any, enumerable = false, configurable = true): void => {
Object.defineProperty(obj, key, { get, configurable, enumerable });
};
export const setter = <T>(obj: T, key: string | symbol, set: (value: any) => void, enumerable = false, configurable = true): void => {
Object.defineProperty(obj, key, { set, configurable, enumerable });
};
export const readonly = <T>(obj: T, key: string | symbol, value: unknown, enumerable = false, configurable = true): void => {
Object.defineProperty(obj, key, { value, configurable, enumerable });
};
export function streamToBuffer(stream: streams.Readable | streams.Duplex): Promise<Buffer> {
return new Promise((resolve, reject) => {
const buffers: Uint8Array[] = [];
stream.on("data", (chunk: Uint8Array) => buffers.push(chunk));
stream.on("end", () => resolve(Buffer.concat(buffers)));
stream.on("error", (err: Error) => reject(err));
});
}
export function isArrayBufferView(value: any): value is ArrayBufferView {
return value !== null && typeof value === 'object' &&
value.buffer instanceof ArrayBuffer && typeof value.byteLength === 'number' && typeof value.byteOffset === 'number';
}
export function isOptions(options: any): options is SpawnOptions.OptionsObject {
return options !== null && typeof options === 'object';
}
export function isFileBlob(blob: any): blob is FileBlob {
return blob instanceof Blob && Reflect.get(blob, 'readable') instanceof ReadableStream && typeof Reflect.get(blob, 'writer') === 'function';
}
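// Usage sketch (hedged): buffering a readable stream and defining a read-only
// property with the helpers above. The file path is illustrative.
//
//   import fs from 'node:fs';
//   const buf = await streamToBuffer(fs.createReadStream('package.json'));
//   readonly(globalThis, 'BUILD_HASH', buf.byteLength.toString(16));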

View File

@@ -0,0 +1,41 @@
import path from 'path';
const abort = (...msg: string[]): never => (console.error(...msg), process.exit(1));
const makefilePath = path.resolve(import.meta.dir, '../../../Makefile');
const makefile = Bun.file(makefilePath);
if (!await makefile.exists()) abort('Makefile not found at', makefilePath);
const makefileContent = await makefile.text();
const matched = makefileContent.match(/^BUN_BASE_VERSION\s*=\s*(\d+\.\d+)/m);
if (!matched) abort('Could not find BUN_BASE_VERSION in Makefile');
const buildidPath = path.resolve(import.meta.dir, '../../../src/build-id');
const buildid = Bun.file(buildidPath);
if (!await buildid.exists()) abort('Build ID file not found at', buildidPath);
const [, BUN_BASE_VERSION] = matched!;
const BUN_VERSION = `${BUN_BASE_VERSION}.${await buildid.text()}`.trim();
const bunTsPath = path.resolve(import.meta.dir, '../src/modules/bun.ts');
const bunTs = Bun.file(bunTsPath);
if (!await bunTs.exists()) abort('bun.ts source file not found at', bunTsPath);
const bunTsContent = await bunTs.text();
const bunTsContentNew = bunTsContent.replace(
/^export const version = '.+' satisfies typeof Bun.version;$/m,
`export const version = '${BUN_VERSION}' satisfies typeof Bun.version;`
);
if (bunTsContentNew !== bunTsContent) console.info('Updated Bun.version polyfill to', BUN_VERSION);
const git = Bun.spawnSync({ cmd: ['git', 'rev-parse', 'HEAD'] });
if (!git.success) abort('Could not get git HEAD commit hash');
const BUN_REVISION = git.stdout.toString('utf8').trim();
const bunTsContentNewer = bunTsContentNew.replace(
/^export const revision = '.+' satisfies typeof Bun.revision;$/m,
`export const revision = '${BUN_REVISION}' satisfies typeof Bun.revision;`
);
if (bunTsContentNewer !== bunTsContentNew) console.info('Updated Bun.revision polyfill to', BUN_REVISION);
await Bun.write(bunTs, bunTsContentNewer);
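// Invocation sketch (hedged): run from the repository root so the relative
// Makefile and src/build-id paths above resolve, e.g. `bun run <this-script>.ts`.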

View File

@@ -0,0 +1,19 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "nodenext",
"moduleDetection": "force",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"inlineSourceMap": true,
"allowJs": true,
"outDir": "dist",
"types": ["node"]
},
"include": ["src", "lib", "../bun-types/index.d.ts"]
}

File diff suppressed because it is too large

View File

@@ -46,6 +46,8 @@ interface ImportMeta {
*
* In production, `data` is inlined to be `{}`. This is handy because Bun
* knows it can minify `{}.prop ??= value` into `value` in production.
*
*
*/
data: any;

View File

@@ -39,7 +39,7 @@ declare module "bun" {
*
* Does not preserve insertion order. Well-known header names are lowercased. Other header names are left as-is.
*/
toJSON(): Record<string, string> & { "set-cookie"?: string[] };
toJSON(): Record<string, string>;
/**
* Get the total number of headers

View File

@@ -3,15 +3,16 @@ declare module "bun" {
type NodeWorkerThreadsWorker = import("node:worker_threads").Worker;
type LibWorkerOrBunWorker = Bun.__internal.UseLibDomIfAvailable<"Worker", Bun.Worker>;
type NodePerfHooksPerformance = import("node:perf_hooks").Performance;
type LibPerformanceOrNodePerfHooksPerformance = Bun.__internal.UseLibDomIfAvailable<
"Performance",
import("perf_hooks").Performance
NodePerfHooksPerformance
>;
type NodeCryptoWebcryptoSubtleCrypto = import("crypto").webcrypto.SubtleCrypto;
type NodeCryptoWebcryptoCryptoKey = import("crypto").webcrypto.CryptoKey;
type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;
type LibEmptyOrWSWebSocket = LibDomIsLoaded extends true ? {} : import("ws").WebSocket;
type LibEmptyOrNodeUtilTextEncoder = LibDomIsLoaded extends true ? {} : import("node:util").TextEncoder;
@@ -25,6 +26,10 @@ declare module "bun" {
? {}
: import("node:stream/web").WritableStream<T>;
type LibEmptyOrNodeTransformStream<I, O> = LibDomIsLoaded extends true
? {}
: import("node:stream/web").TransformStream<I, O>;
type LibEmptyOrNodeMessagePort = LibDomIsLoaded extends true ? {} : import("node:worker_threads").MessagePort;
}
}
@@ -63,71 +68,15 @@ declare var Worker: Bun.__internal.UseLibDomIfAvailable<
}
>;
/**
* A WebSocket client implementation.
*/
interface WebSocket extends Bun.__internal.LibEmptyOrBunWebSocket {}
interface WebSocket extends Bun.__internal.LibEmptyOrWSWebSocket {}
/**
* A WebSocket client implementation
*
* If `DOM` is included in tsconfig `lib`, this falls back to the default DOM global `WebSocket`.
* Otherwise (when outside of a browser environment), this will be the `WebSocket`
* implementation from the `ws` package, which Bun implements.
*/
declare var WebSocket: Bun.__internal.UseLibDomIfAvailable<
"WebSocket",
{
prototype: WebSocket;
/**
* Creates a new WebSocket instance with the given URL and options.
*
* @param url The URL to connect to.
* @param options The options to use for the connection.
*
* @example
* ```ts
* const ws = new WebSocket("wss://dev.local", {
* protocols: ["proto1", "proto2"],
* headers: {
* "Cookie": "session=123456",
* },
* });
* ```
*/
new (url: string | URL, options?: Bun.WebSocketOptions): WebSocket;
/**
* Creates a new WebSocket instance with the given URL and protocols.
*
* @param url The URL to connect to.
* @param protocols The protocols to use for the connection.
*
* @example
* ```ts
* const ws = new WebSocket("wss://dev.local");
* const ws = new WebSocket("wss://dev.local", ["proto1", "proto2"]);
* ```
*/
new (url: string | URL, protocols?: string | string[]): WebSocket;
/**
* The connection is not yet open
*/
readonly CONNECTING: 0;
/**
* The connection is open and ready to communicate
*/
readonly OPEN: 1;
/**
* The connection is in the process of closing
*/
readonly CLOSING: 2;
/**
* The connection is closed or couldn't be opened
*/
readonly CLOSED: 3;
}
>;
declare var WebSocket: Bun.__internal.UseLibDomIfAvailable<"WebSocket", typeof import("ws").WebSocket>;
interface Crypto {
readonly subtle: SubtleCrypto;
@@ -276,24 +225,26 @@ interface File extends Blob {
readonly lastModified: number;
readonly name: string;
}
declare var File: Bun.__internal.UseLibDomIfAvailable<
"File",
{
prototype: File;
/**
* Create a new [File](https://developer.mozilla.org/en-US/docs/Web/API/File)
*
* @param `parts` - An array of strings, numbers, BufferSource, or [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects
* @param `name` - The name of the file
* @param `options` - An object containing properties to be added to the [File](https://developer.mozilla.org/en-US/docs/Web/API/File)
*/
new (
parts: Bun.BlobPart[],
name: string,
options?: BlobPropertyBag & { lastModified?: Date | number | undefined },
): File;
}
>;
declare var File: typeof globalThis extends { onabort: any }
? typeof globalThis extends { File: infer T }
? T
: never
: {
prototype: File;
/**
* Create a new [File](https://developer.mozilla.org/en-US/docs/Web/API/File)
*
* @param `parts` - An array of strings, numbers, BufferSource, or [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects
* @param `name` - The name of the file
* @param `options` - An object containing properties to be added to the [File](https://developer.mozilla.org/en-US/docs/Web/API/File)
*/
new (
parts: Bun.BlobPart[],
name: string,
options?: BlobPropertyBag & { lastModified?: Date | number | undefined },
): File;
};
/**
* ShadowRealms are a distinct global environment, with its own global object
@@ -1312,7 +1263,7 @@ interface PromiseConstructor {
* This is useful when you want to return a Promise and have code outside the Promise
* resolve or reject it.
*
* @example
* ## Example
* ```ts
* const { promise, resolve, reject } = Promise.withResolvers();
*
@@ -1322,6 +1273,8 @@ interface PromiseConstructor {
*
* await promise; // "Hello world!"
* ```
*
* `Promise.withResolvers()` is a [stage3 proposal](https://github.com/tc39/proposal-promise-with-resolvers).
*/
withResolvers<T>(): {
promise: Promise<T>;
@@ -1408,7 +1361,7 @@ interface Blob {
/**
* Returns a readable stream of the blob's contents
*/
stream(): ReadableStream<Uint8Array>;
stream(): ReadableStream;
}
declare var Blob: Bun.__internal.UseLibDomIfAvailable<
@@ -1830,40 +1783,14 @@ interface BunFetchRequestInit extends RequestInit {
/**
* Override http_proxy or HTTPS_PROXY
* This is a custom property that is not part of the Fetch API specification.
*
* @example
* ```js
* const response = await fetch("http://example.com", {
* proxy: "https://username:password@127.0.0.1:8080"
* });
* ```
*/
proxy?: string;
/**
* Override the default S3 options
*
* @example
* ```js
* const response = await fetch("s3://bucket/key", {
* s3: {
* accessKeyId: "AKIAIOSFODNN7EXAMPLE",
* secretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
* region: "us-east-1",
* }
* });
* ```
*/
s3?: Bun.S3Options;
/**
* Make the request over a Unix socket
*
* @example
* ```js
* const response = await fetch("http://example.com", { unix: "/path/to/socket" });
* ```
*/
unix?: string;
}

View File

@@ -19,8 +19,6 @@
/// <reference path="./overrides.d.ts" />
/// <reference path="./deprecated.d.ts" />
/// <reference path="./redis.d.ts" />
/// <reference path="./shell.d.ts" />
/// <reference path="./bun.ns.d.ts" />
// @ts-ignore Must disable this so it doesn't conflict with the DOM onmessage type, but still

View File

@@ -8,8 +8,24 @@ declare module "bun:jsc" {
function fullGC(): number;
function edenGC(): number;
function heapSize(): number;
function heapStats(): HeapStats;
function memoryUsage(): MemoryUsage;
function heapStats(): {
heapSize: number;
heapCapacity: number;
extraMemorySize: number;
objectCount: number;
protectedObjectCount: number;
globalObjectCount: number;
protectedGlobalObjectCount: number;
objectTypeCounts: Record<string, number>;
protectedObjectTypeCounts: Record<string, number>;
};
function memoryUsage(): {
current: number;
peak: number;
currentCommit: number;
peakCommit: number;
pageFaults: number;
};
function getRandomSeed(): number;
function setRandomSeed(value: number): void;
function isRope(input: string): boolean;
@@ -62,26 +78,6 @@ declare module "bun:jsc" {
*/
function setTimeZone(timeZone: string): string;
interface HeapStats {
heapSize: number;
heapCapacity: number;
extraMemorySize: number;
objectCount: number;
protectedObjectCount: number;
globalObjectCount: number;
protectedGlobalObjectCount: number;
objectTypeCounts: Record<string, number>;
protectedObjectTypeCounts: Record<string, number>;
}
interface MemoryUsage {
current: number;
peak: number;
currentCommit: number;
peakCommit: number;
pageFaults: number;
}
interface SamplingProfile {
/**
* A formatted summary of the top functions

View File

@@ -15,7 +15,8 @@
],
"homepage": "https://bun.sh",
"dependencies": {
"@types/node": "*"
"@types/node": "*",
"@types/ws": "*"
},
"devDependencies": {
"@biomejs/biome": "^1.5.3",

View File

@@ -1,5 +1,11 @@
declare module "bun" {
export interface RedisOptions {
/**
* URL to connect to, defaults to "redis://localhost:6379"
* Supported protocols: redis://, rediss://, redis+unix://, redis+tls://
*/
url?: string;
/**
* Connection timeout in milliseconds
* @default 10000

View File

@@ -9,30 +9,17 @@ declare module "bun" {
* Write a chunk of data to the file.
*
* If the file descriptor is not writable yet, the data is buffered.
*
* @param chunk The data to write
* @returns Number of bytes written
*/
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer, committing the data to disk or the pipe.
*
* @returns Number of bytes flushed or a Promise resolving to the number of bytes
*/
flush(): number | Promise<number>;
/**
* Close the file descriptor. This also flushes the internal buffer.
*
* @param error Optional error to associate with the close operation
* @returns Number of bytes written or a Promise resolving to the number of bytes
*/
end(error?: Error): number | Promise<number>;
/**
* Start the file sink with provided options.
*
* @param options Configuration options for the file sink
*/
start(options?: {
/**
* Preallocate an internal buffer of this size
@@ -76,29 +63,19 @@ declare module "bun" {
* Write a chunk of data to the network.
*
* If the network is not writable yet, the data is buffered.
*
* @param chunk The data to write
* @returns Number of bytes written
*/
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer, committing the data to the network.
*
* @returns Number of bytes flushed or a Promise resolving to the number of bytes
*/
flush(): number | Promise<number>;
/**
* Finish the upload. This also flushes the internal buffer.
*
* @param error Optional error to associate with the end operation
* @returns Number of bytes written or a Promise resolving to the number of bytes
*/
end(error?: Error): number | Promise<number>;
/**
* Get the stat of the file.
*
* @returns Promise resolving to the file stats
*/
stat(): Promise<import("node:fs").Stats>;
}
@@ -672,7 +649,7 @@ declare module "bun" {
contents?: {
/** The algorithm that was used to create a checksum of the object. */
checksumAlgorithm?: "CRC32" | "CRC32C" | "SHA1" | "SHA256" | "CRC64NVME";
/** The checksum type that is used to calculate the object's checksum value. */
/** The checksum type that is used to calculate the objects checksum value. */
checksumType?: "COMPOSITE" | "FULL_OBJECT";
/**
* The entity tag is a hash of the object. The ETag reflects changes only to the contents of an object, not its metadata. The ETag may or may not be an MD5 digest of the object data. Whether or not it is depends on how the object was created and how it is encrypted as described below:
@@ -764,13 +741,13 @@ declare module "bun" {
* @category Cloud Storage
*/
class S3Client {
prototype: S3Client;
/**
* Create a new instance of an S3 bucket so that credentials can be managed
* from a single instance instead of being passed to every method.
*
* @param options The default options to use for the S3 client. Can be
* overridden by passing options to the methods.
* @returns A new S3Client instance
*
* ## Keep S3 credentials in a single instance
*
@@ -802,49 +779,20 @@ declare module "bun" {
/**
* Creates an S3File instance for the given path.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns An S3File instance
*
* @example
* const file = bucket.file("image.jpg");
* await file.write(imageData);
*
* const configFile = bucket.file("config.json", {
* type: "application/json",
* acl: "private"
* });
* const file = bucket.file("image.jpg");
* await file.write(imageData);
* const configFile = bucket.file("config.json", {
* type: "application/json",
* acl: "private"
* });
*/
file(path: string, options?: S3Options): S3File;
/**
* Creates an S3File instance for the given path.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns An S3File instance
*
* @example
* const file = S3Client.file("image.jpg", credentials);
* await file.write(imageData);
*
* const configFile = S3Client.file("config.json", {
* ...credentials,
* type: "application/json",
* acl: "private"
* });
*/
static file(path: string, options?: S3Options): S3File;
/**
* Writes data directly to a path in the bucket.
* Supports strings, buffers, streams, and web API types.
*
* @param path The path to the file in the bucket
* @param data The data to write to the file
* @param options Additional S3 options to override defaults
* @returns The number of bytes written
*
* @example
* // Write string
* await bucket.write("hello.txt", "Hello World");
@@ -882,64 +830,10 @@ declare module "bun" {
options?: S3Options,
): Promise<number>;
/**
* Writes data directly to a path in the bucket.
* Supports strings, buffers, streams, and web API types.
*
* @param path The path to the file in the bucket
* @param data The data to write to the file
* @param options S3 credentials and configuration options
* @returns The number of bytes written
*
* @example
* // Write string
* await S3Client.write("hello.txt", "Hello World", credentials);
*
* // Write JSON with type
* await S3Client.write(
* "data.json",
* JSON.stringify({hello: "world"}),
* {
* ...credentials,
* type: "application/json"
* }
* );
*
* // Write from fetch
* const res = await fetch("https://example.com/data");
* await S3Client.write("data.bin", res, credentials);
*
* // Write with ACL
* await S3Client.write("public.html", html, {
* ...credentials,
* acl: "public-read",
* type: "text/html"
* });
*/
static write(
path: string,
data:
| string
| ArrayBufferView
| ArrayBuffer
| SharedArrayBuffer
| Request
| Response
| BunFile
| S3File
| Blob
| File,
options?: S3Options,
): Promise<number>;
/**
* Generate a presigned URL for temporary access to a file.
* Useful for generating upload/download URLs without exposing credentials.
*
* @param path The path to the file in the bucket
* @param options Options for generating the presigned URL
* @returns A presigned URL string
*
* @example
* // Download URL
* const downloadUrl = bucket.presign("file.pdf", {
@@ -962,46 +856,9 @@ declare module "bun" {
*/
presign(path: string, options?: S3FilePresignOptions): string;
/**
* Generate a presigned URL for temporary access to a file.
* Useful for generating upload/download URLs without exposing credentials.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and presigned URL configuration
* @returns A presigned URL string
*
* @example
* // Download URL
* const downloadUrl = S3Client.presign("file.pdf", {
* ...credentials,
* expiresIn: 3600 // 1 hour
* });
*
* // Upload URL
* const uploadUrl = S3Client.presign("uploads/image.jpg", {
* ...credentials,
* method: "PUT",
* expiresIn: 3600,
* type: "image/jpeg",
* acl: "public-read"
* });
*
* // Long-lived public URL
* const publicUrl = S3Client.presign("public/doc.pdf", {
* ...credentials,
* expiresIn: 7 * 24 * 60 * 60, // 7 days
* acl: "public-read"
* });
*/
static presign(path: string, options?: S3FilePresignOptions): string;
/**
* Delete a file from the bucket.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await bucket.unlink("old-file.txt");
@@ -1015,80 +872,12 @@ declare module "bun" {
* }
*/
unlink(path: string, options?: S3Options): Promise<void>;
/**
* Delete a file from the bucket.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await S3Client.unlink("old-file.txt", credentials);
*
* // With error handling
* try {
* await S3Client.unlink("file.dat", credentials);
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
static unlink(path: string, options?: S3Options): Promise<void>;
/**
* Delete a file from the bucket.
* Alias for {@link S3Client.unlink}.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await bucket.delete("old-file.txt");
*
* // With error handling
* try {
* await bucket.delete("file.dat");
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
delete(path: string, options?: S3Options): Promise<void>;
/**
* Delete a file from the bucket.
* Alias for {@link S3Client.unlink}.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await S3Client.delete("old-file.txt", credentials);
*
* // With error handling
* try {
* await S3Client.delete("file.dat", credentials);
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
static delete(path: string, options?: S3Options): Promise<void>;
delete: S3Client["unlink"];
/**
* Get the size of a file in bytes.
* Uses HEAD request to efficiently get size.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to the file size in bytes
*
* @example
* // Get size
* const bytes = await bucket.size("video.mp4");
@@ -1101,34 +890,10 @@ declare module "bun" {
*/
size(path: string, options?: S3Options): Promise<number>;
/**
* Get the size of a file in bytes.
* Uses HEAD request to efficiently get size.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to the file size in bytes
*
* @example
* // Get size
* const bytes = await S3Client.size("video.mp4", credentials);
* console.log(`Size: ${bytes} bytes`);
*
* // Check if file is large
* if (await S3Client.size("data.zip", credentials) > 100 * 1024 * 1024) {
* console.log("File is larger than 100MB");
* }
*/
static size(path: string, options?: S3Options): Promise<number>;
/**
* Check if a file exists in the bucket.
* Uses HEAD request to check existence.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to true if the file exists, false otherwise
*
* @example
* // Check existence
* if (await bucket.exists("config.json")) {
@@ -1146,124 +911,23 @@ declare module "bun" {
* }
*/
exists(path: string, options?: S3Options): Promise<boolean>;
/**
* Check if a file exists in the bucket.
* Uses HEAD request to check existence.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to true if the file exists, false otherwise
*
* @example
* // Check existence
* if (await S3Client.exists("config.json", credentials)) {
* const file = bucket.file("config.json");
* const config = await file.json();
* }
*
* // With error handling
* try {
* if (!await S3Client.exists("required.txt", credentials)) {
* throw new Error("Required file missing");
* }
* } catch (err) {
* console.error("Check failed:", err);
* }
*/
static exists(path: string, options?: S3Options): Promise<boolean>;
/**
* Get the stat of a file in an S3-compatible storage service.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to the file stats
*
* @example
* const stat = await bucket.stat("my-file.txt");
* @param path The path to the file.
* @param options The options to use for the S3 client.
*/
stat(path: string, options?: S3Options): Promise<S3Stats>;
/**
* Get the stat of a file in an S3-compatible storage service.
/** Returns some or all (up to 1,000) of the objects in a bucket with each request.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to the file stats
*
* @example
* const stat = await S3Client.stat("my-file.txt", credentials);
*/
static stat(path: string, options?: S3Options): Promise<S3Stats>;
/**
* Returns some or all (up to 1,000) of the objects in a bucket with each request.
*
* You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
*
* @param input Options for listing objects in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to the list response
*
* @example
* // List (up to) 1000 objects in the bucket
* const allObjects = await bucket.list();
*
* // List (up to) 500 objects under `uploads/` prefix, with owner field for each object
* const uploads = await bucket.list({
* prefix: 'uploads/',
* maxKeys: 500,
* fetchOwner: true,
* });
*
* // Check if more results are available
* if (uploads.isTruncated) {
* // List next batch of objects under `uploads/` prefix
* const moreUploads = await bucket.list({
* prefix: 'uploads/',
* maxKeys: 500,
* startAfter: uploads.contents!.at(-1).key,
* fetchOwner: true,
* });
* }
* You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
*/
list(
input?: S3ListObjectsOptions | null,
options?: Pick<S3Options, "accessKeyId" | "secretAccessKey" | "sessionToken" | "region" | "bucket" | "endpoint">,
): Promise<S3ListObjectsResponse>;
/**
* Returns some or all (up to 1,000) of the objects in a bucket with each request.
*
* You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
*
* @param input Options for listing objects in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to the list response
*
* @example
* // List (up to) 1000 objects in the bucket
* const allObjects = await S3Client.list(null, credentials);
*
* // List (up to) 500 objects under `uploads/` prefix, with owner field for each object
* const uploads = await S3Client.list({
* prefix: 'uploads/',
* maxKeys: 500,
* fetchOwner: true,
* }, credentials);
*
* // Check if more results are available
* if (uploads.isTruncated) {
* // List next batch of objects under `uploads/` prefix
* const moreUploads = await S3Client.list({
* prefix: 'uploads/',
* maxKeys: 500,
* startAfter: uploads.contents!.at(-1).key,
* fetchOwner: true,
* }, credentials);
* }
*/
static list(
input?: S3ListObjectsOptions | null,
options?: Pick<S3Options, "accessKeyId" | "secretAccessKey" | "sessionToken" | "region" | "bucket" | "endpoint">,

View File

@@ -1,379 +0,0 @@
declare module "bun" {
type ShellFunction = (input: Uint8Array) => Uint8Array;
type ShellExpression =
| { toString(): string }
| Array<ShellExpression>
| string
| { raw: string }
| Subprocess
| SpawnOptions.Readable
| SpawnOptions.Writable
| ReadableStream;
/**
* The [Bun shell](https://bun.sh/docs/runtime/shell) is a powerful tool for running shell commands.
*
* @example
* ```ts
* const result = await $`echo "Hello, world!"`.text();
* console.log(result); // "Hello, world!"
* ```
*
* @category Process Management
*/
function $(strings: TemplateStringsArray, ...expressions: ShellExpression[]): $.ShellPromise;
namespace $ {
const Shell: new () => typeof $;
/**
* Perform bash-like brace expansion on the given pattern.
* @param pattern - Brace pattern to expand
*
* @example
* ```js
* const result = braces('index.{js,jsx,ts,tsx}');
* console.log(result) // ['index.js', 'index.jsx', 'index.ts', 'index.tsx']
* ```
*/
function braces(pattern: string): string[];
/**
* Escape strings for input into shell commands.
* @param input
*/
function escape(input: string): string;
/**
*
* Change the default environment variables for shells created by this instance.
*
* @param newEnv Default environment variables to use for shells created by this instance.
* @default process.env
*
* @example
* ```js
* import {$} from 'bun';
* $.env({ BUN: "bun" });
* await $`echo $BUN`;
* // "bun"
* ```
*/
function env(newEnv?: Record<string, string | undefined>): typeof $;
/**
*
* @param newCwd Default working directory to use for shells created by this instance.
*/
function cwd(newCwd?: string): typeof $;
/**
* Configure the shell to not throw an exception on non-zero exit codes.
*/
function nothrow(): typeof $;
/**
* Configure whether or not the shell should throw an exception on non-zero exit codes.
*/
function throws(shouldThrow: boolean): typeof $;
/**
* The `Bun.$.ShellPromise` class represents a shell command that gets executed
* once awaited, or called with `.text()`, `.json()`, etc.
*
* @example
* ```ts
* const myShellPromise = $`echo "Hello, world!"`;
* const result = await myShellPromise.text();
* console.log(result); // "Hello, world!"
* ```
*/
class ShellPromise extends Promise<ShellOutput> {
get stdin(): WritableStream;
/**
* Change the current working directory of the shell.
* @param newCwd - The new working directory
*/
cwd(newCwd: string): this;
/**
* Set environment variables for the shell.
* @param newEnv - The new environment variables
*
* @example
* ```ts
* await $`echo $FOO`.env({ ...process.env, FOO: "LOL!" })
* expect(stdout.toString()).toBe("LOL!");
* ```
*/
env(newEnv: Record<string, string> | undefined): this;
/**
* By default, the shell will write to the current process's stdout and stderr, as well as buffering that output.
*
* This configures the shell to only buffer the output.
*/
quiet(): this;
/**
* Read from stdout as a string, line by line
*
* Automatically calls {@link quiet} to disable echoing to stdout.
*/
lines(): AsyncIterable<string>;
/**
* Read from stdout as a string.
*
* Automatically calls {@link quiet} to disable echoing to stdout.
*
* @param encoding - The encoding to use when decoding the output
* @returns A promise that resolves with stdout as a string
*
* @example
* **Read as UTF-8 string**
* ```ts
* const output = await $`echo hello`.text();
* console.log(output); // "hello\n"
* ```
*
* **Read as base64 string**
* ```ts
* const output = await $`echo ${atob("hello")}`.text("base64");
* console.log(output); // "hello\n"
* ```
*/
text(encoding?: BufferEncoding): Promise<string>;
/**
* Read from stdout as a JSON object
*
* Automatically calls {@link quiet}
*
* @returns A promise that resolves with stdout as a JSON object
* @example
*
* ```ts
* const output = await $`echo '{"hello": 123}'`.json();
* console.log(output); // { hello: 123 }
* ```
*
*/
json(): Promise<any>;
/**
* Read from stdout as an ArrayBuffer
*
* Automatically calls {@link quiet}
* @returns A promise that resolves with stdout as an ArrayBuffer
* @example
*
* ```ts
* const output = await $`echo hello`.arrayBuffer();
* console.log(output); // ArrayBuffer { byteLength: 6 }
* ```
*/
arrayBuffer(): Promise<ArrayBuffer>;
/**
* Read from stdout as a Blob
*
* Automatically calls {@link quiet}
* @returns A promise that resolves with stdout as a Blob
* @example
* ```ts
* const output = await $`echo hello`.blob();
* console.log(output); // Blob { size: 6, type: "" }
* ```
*/
blob(): Promise<Blob>;
/**
* Configure the shell to not throw an exception on non-zero exit codes. Throwing can be re-enabled with `.throws(true)`.
*
* By default, the shell will throw an exception on commands which return non-zero exit codes.
*/
nothrow(): this;
/**
* Configure whether or not the shell should throw an exception on non-zero exit codes.
*
* By default, this is configured to `true`.
*/
throws(shouldThrow: boolean): this;
}
/**
* ShellError represents an error that occurred while executing a shell command with [the Bun Shell](https://bun.sh/docs/runtime/shell).
*
* @example
* ```ts
* try {
* const result = await $`exit 1`;
* } catch (error) {
* if (error instanceof ShellError) {
* console.log(error.exitCode); // 1
* }
* }
* ```
*/
class ShellError extends Error implements ShellOutput {
readonly stdout: Buffer;
readonly stderr: Buffer;
readonly exitCode: number;
/**
* Read from stdout as a string
*
* @param encoding - The encoding to use when decoding the output
* @returns Stdout as a string with the given encoding
*
* @example
* **Read as UTF-8 string**
* ```ts
* const output = await $`echo hello`;
* console.log(output.text()); // "hello\n"
* ```
*
* **Read as base64 string**
* ```ts
* const output = await $`echo ${atob("hello")}`;
* console.log(output.text("base64")); // "hello\n"
* ```
*/
text(encoding?: BufferEncoding): string;
/**
* Read from stdout as a JSON object
*
* @returns Stdout as a JSON object
* @example
*
* ```ts
* const output = await $`echo '{"hello": 123}'`;
* console.log(output.json()); // { hello: 123 }
* ```
*
*/
json(): any;
/**
* Read from stdout as an ArrayBuffer
*
* @returns Stdout as an ArrayBuffer
* @example
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.arrayBuffer()); // ArrayBuffer { byteLength: 6 }
* ```
*/
arrayBuffer(): ArrayBuffer;
/**
* Read from stdout as a Blob
*
* @returns Stdout as a blob
* @example
* ```ts
* const output = await $`echo hello`;
* console.log(output.blob()); // Blob { size: 6, type: "" }
* ```
*/
blob(): Blob;
/**
* Read from stdout as a Uint8Array
*
* @returns Stdout as a Uint8Array
* @example
* ```ts
* const output = await $`echo hello`;
* console.log(output.bytes()); // Uint8Array { byteLength: 6 }
* ```
*/
bytes(): Uint8Array;
}
interface ShellOutput {
readonly stdout: Buffer;
readonly stderr: Buffer;
readonly exitCode: number;
/**
* Read from stdout as a string
*
* @param encoding - The encoding to use when decoding the output
* @returns Stdout as a string with the given encoding
*
* @example
* **Read as UTF-8 string**
* ```ts
* const output = await $`echo hello`;
* console.log(output.text()); // "hello\n"
* ```
*
* **Read as base64 string**
* ```ts
* const output = await $`echo ${atob("hello")}`;
* console.log(output.text("base64")); // "hello\n"
* ```
*/
text(encoding?: BufferEncoding): string;
/**
* Read from stdout as a JSON object
*
* @returns Stdout as a JSON object
* @example
*
* ```ts
* const output = await $`echo '{"hello": 123}'`;
* console.log(output.json()); // { hello: 123 }
* ```
*
*/
json(): any;
/**
* Read from stdout as an ArrayBuffer
*
* @returns Stdout as an ArrayBuffer
* @example
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.arrayBuffer()); // ArrayBuffer { byteLength: 6 }
* ```
*/
arrayBuffer(): ArrayBuffer;
/**
* Read from stdout as a Uint8Array
*
* @returns Stdout as a Uint8Array
* @example
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.bytes()); // Uint8Array { byteLength: 6 }
* ```
*/
bytes(): Uint8Array;
/**
* Read from stdout as a Blob
*
* @returns Stdout as a blob
* @example
* ```ts
* const output = await $`echo hello`;
* console.log(output.blob()); // Blob { size: 6, type: "" }
* ```
*/
blob(): Blob;
}
}
}

View File

@@ -13,15 +13,15 @@
*
* The following types can be used when binding parameters:
*
* | JavaScript type | SQLite type |
* | --------------- | ---------------------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
* | JavaScript type | SQLite type |
* | -------------- | ----------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
*/
declare module "bun:sqlite" {
/**
@@ -159,20 +159,6 @@ declare module "bun:sqlite" {
*
* This does not cache the query, so if you want to run a query multiple times, you should use {@link prepare} instead.
*
* Under the hood, this calls `sqlite3_prepare_v3` followed by `sqlite3_step` and `sqlite3_finalize`.
*
* The following types can be used when binding parameters:
*
* | JavaScript type | SQLite type |
* | --------------- | ---------------------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
*
* @example
* ```ts
* db.run("CREATE TABLE foo (bar TEXT)");
@@ -198,15 +184,30 @@ declare module "bun:sqlite" {
* - `CREATE TEMPORARY TABLE`
*
* @param sql The SQL query to run
*
* @param bindings Optional bindings for the query
*
* @returns `Database` instance
*
* Under the hood, this calls `sqlite3_prepare_v3` followed by `sqlite3_step` and `sqlite3_finalize`.
*
* The following types can be used when binding parameters:
*
* | JavaScript type | SQLite type |
* | -------------- | ----------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
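*
* @example
* A minimal binding sketch (assumes the `foo` table created in the example above):
* ```ts
* db.run("INSERT INTO foo (bar) VALUES (?)", ["hello"]);
* ```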
*/
run<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
run<ParamsType extends SQLQueryBindings[]>(sqlQuery: string, ...bindings: ParamsType[]): Changes;
/**
* This is an alias of {@link Database.run}
* This is an alias of {@link Database.prototype.run}
*/
exec<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
exec<ParamsType extends SQLQueryBindings[]>(sqlQuery: string, ...bindings: ParamsType[]): Changes;
/**
* Compile a SQL query and return a {@link Statement} object. This is the
@@ -215,8 +216,6 @@ declare module "bun:sqlite" {
* This **does not execute** the query, but instead prepares it for later
* execution and caches the compiled query if possible.
*
* Under the hood, this calls `sqlite3_prepare_v3`.
*
* @example
* ```ts
* // compile the query
@@ -229,19 +228,21 @@ declare module "bun:sqlite" {
* ```
*
* @param sql The SQL query to compile
*
* @returns `Statement` instance
*
* Under the hood, this calls `sqlite3_prepare_v3`.
*/
query<ReturnType, ParamsType extends SQLQueryBindings | SQLQueryBindings[]>(
sql: string,
): Statement<ReturnType, ParamsType extends any[] ? ParamsType : [ParamsType]>;
sqlQuery: string,
): // eslint-disable-next-line @definitelytyped/no-single-element-tuple-type
Statement<ReturnType, ParamsType extends any[] ? ParamsType : [ParamsType]>;
/**
* Compile a SQL query and return a {@link Statement} object.
*
* This does not cache the compiled query and does not execute the query.
*
* Under the hood, this calls `sqlite3_prepare_v3`.
*
* @example
* ```ts
* // compile the query
@@ -253,12 +254,15 @@ declare module "bun:sqlite" {
* @param sql The SQL query to compile
* @param params Optional bindings for the query
*
* @returns A {@link Statement} instance
* @returns `Statement` instance
*
* Under the hood, this calls `sqlite3_prepare_v3`.
*/
prepare<ReturnType, ParamsType extends SQLQueryBindings | SQLQueryBindings[]>(
sql: string,
sqlQuery: string,
params?: ParamsType,
): Statement<ReturnType, ParamsType extends any[] ? ParamsType : [ParamsType]>;
): // eslint-disable-next-line @definitelytyped/no-single-element-tuple-type
Statement<ReturnType, ParamsType extends any[] ? ParamsType : [ParamsType]>;
/**
* Is the database in a transaction?
@@ -642,15 +646,15 @@ declare module "bun:sqlite" {
*
* The following types can be used when binding parameters:
*
* | JavaScript type | SQLite type |
* | --------------- | ---------------------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
* | JavaScript type | SQLite type |
* | -------------- | ----------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
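*
* @example
* A minimal usage sketch (assumes a `foo` table with a text `bar` column):
* ```ts
* const row = db.query("SELECT * FROM foo WHERE bar = ?").get("hello");
* console.log(row); // { bar: "hello" }, or null if no row matched
* ```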
*/
get(...params: ParamsType): ReturnType | null;
@@ -683,15 +687,15 @@ declare module "bun:sqlite" {
*
* The following types can be used when binding parameters:
*
* | JavaScript type | SQLite type |
* | --------------- | ---------------------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
* | JavaScript type | SQLite type |
* | -------------- | ----------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
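*
* @example
* A minimal usage sketch (assumes the same `foo` table):
* ```ts
* const changes = db.prepare("INSERT INTO foo (bar) VALUES (?)").run("hello");
* console.log(changes.changes); // 1
* ```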
*/
run(...params: ParamsType): Changes;
@@ -723,15 +727,15 @@ declare module "bun:sqlite" {
*
* The following types can be used when binding parameters:
*
* | JavaScript type | SQLite type |
* | --------------- | ---------------------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
* | JavaScript type | SQLite type |
* | -------------- | ----------- |
* | `string` | `TEXT` |
* | `number` | `INTEGER` or `DECIMAL` |
* | `boolean` | `INTEGER` (1 or 0) |
* | `Uint8Array` | `BLOB` |
* | `Buffer` | `BLOB` |
* | `bigint` | `INTEGER` |
* | `null` | `NULL` |
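*
* @example
* A minimal usage sketch (assumes the same `foo` table):
* ```ts
* const rows = db.query("SELECT bar FROM foo").values();
* console.log(rows); // [["hello"]]
* ```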
*/
values(...params: ParamsType): Array<Array<string | bigint | number | boolean | Uint8Array>>;

View File

@@ -29,14 +29,11 @@ declare module "bun:test" {
*
* This is useful for mocking modules.
*
* If the module is already loaded, exports are overwritten with the return
* value of `factory`. If the export didn't exist before, it will not be
* added to existing import statements. This is due to how ESM works.
*
* @param id module ID to mock
* @param factory a function returning an object that will be used as the exports of the mocked module
*
* @example
* ## Example
* ```ts
* import { mock } from "bun:test";
*
@@ -50,6 +47,12 @@ declare module "bun:test" {
*
* console.log(await readFile("hello.txt", "utf8")); // hello world
* ```
*
* ## More notes
*
* If the module is already loaded, exports are overwritten with the return
* value of `factory`. If the export didn't exist before, it will not be
* added to existing import statements. This is due to how ESM works.
*/
module(id: string, factory: () => any): void | Promise<void>;
/**
@@ -152,8 +155,6 @@ declare module "bun:test" {
readonly name: string;
}
type DescribeLabel = number | string | Function | FunctionLike;
/**
* Describes a group of related tests.
*
@@ -175,28 +176,28 @@ declare module "bun:test" {
export interface Describe {
(fn: () => void): void;
(label: DescribeLabel, fn: () => void): void;
(label: number | string | Function | FunctionLike, fn: () => void): void;
/**
* Skips all other tests, except this group of tests.
*
* @param label the label for the tests
* @param fn the function that defines the tests
*/
only(label: DescribeLabel, fn: () => void): void;
only(label: string, fn: () => void): void;
/**
* Skips this group of tests.
*
* @param label the label for the tests
* @param fn the function that defines the tests
*/
skip(label: DescribeLabel, fn: () => void): void;
skip(label: string, fn: () => void): void;
/**
* Marks this group of tests as to be written or to be fixed.
*
* @param label the label for the tests
* @param fn the function that defines the tests
*/
todo(label: DescribeLabel, fn?: () => void): void;
todo(label: string, fn?: () => void): void;
/**
* Runs this group of tests, only if `condition` is true.
*
@@ -204,19 +205,19 @@ declare module "bun:test" {
*
* @param condition if these tests should run
*/
if(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
if(condition: boolean): (label: string, fn: () => void) => void;
/**
* Skips this group of tests, if `condition` is true.
*
* @param condition if these tests should be skipped
*/
skipIf(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
skipIf(condition: boolean): (label: string, fn: () => void) => void;
/**
* Marks this group of tests as to be written or to be fixed, if `condition` is true.
*
* @param condition if these tests should be skipped
*/
todoIf(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
todoIf(condition: boolean): (label: string, fn: () => void) => void;
/**
* Returns a function that runs for each item in `table`.
*
@@ -224,17 +225,13 @@ declare module "bun:test" {
*/
each<T extends Readonly<[any, ...any[]]>>(
table: readonly T[],
): (label: DescribeLabel, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
): (label: string, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T extends any[]>(
table: readonly T[],
): (
label: DescribeLabel,
fn: (...args: Readonly<T>) => void | Promise<unknown>,
options?: number | TestOptions,
) => void;
): (label: string, fn: (...args: Readonly<T>) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T>(
table: T[],
): (label: DescribeLabel, fn: (...args: T[]) => void | Promise<unknown>, options?: number | TestOptions) => void;
): (label: string, fn: (...args: T[]) => void | Promise<unknown>, options?: number | TestOptions) => void;
}
/**
* Describes a group of related tests.
@@ -596,6 +593,8 @@ declare module "bun:test" {
* @returns never
*
* @example
* ## Example
*
* ```ts
* import { expect, test } from "bun:test";
*
@@ -1787,329 +1786,367 @@ declare module "bun:test" {
}
type MatcherContext = MatcherUtils & MatcherState;
}
namespace JestMock {
declare namespace JestMock {
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export interface ClassLike {
new (...args: any): any;
}
export type ConstructorLikeKeys<T> = keyof {
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
};
// export const fn: <T extends FunctionLike = UnknownFunction>(
// implementation?: T | undefined,
// ) => Mock<T>;
export type FunctionLike = (...args: any) => any;
export type MethodLikeKeys<T> = keyof {
[K in keyof T as Required<T>[K] extends FunctionLike ? K : never]: T[K];
};
export interface Mock<T extends (...args: any[]) => any> extends MockInstance<T> {
(...args: Parameters<T>): ReturnType<T>;
}
/**
* All the internal typings need is to be sure that we have an any-function.
* `FunctionLike` type ensures that and helps to constrain the type as well.
* The default of `UnknownFunction` makes sure that `any`s do not leak to the
* user side. For instance, calling `fn()` without implementation will return
* a mock of `(...args: Array<unknown>) => unknown` type. If implementation
* is provided, its typings are inferred correctly.
*/
// export interface Mock<T extends FunctionLike = UnknownFunction>
// extends Function,
// MockInstance<T> {
// new (...args: Parameters<T>): ReturnType<T>;
// (...args: Parameters<T>): ReturnType<T>;
// }
// export type Mocked<T> = T extends ClassLike
// ? MockedClass<T>
// : T extends FunctionLike
// ? MockedFunction<T>
// : T extends object
// ? MockedObject<T>
// : T;
// export const mocked: {
// <T extends object>(
// source: T,
// options?: {
// shallow: false;
// },
// ): Mocked<T>;
// <T_1 extends object>(
// source: T_1,
// options: {
// shallow: true;
// },
// ): MockedShallow<T_1>;
// };
// export type MockedClass<T extends ClassLike> = MockInstance<
// (...args: ConstructorParameters<T>) => Mocked<InstanceType<T>>
// > &
// MockedObject<T>;
// export type MockedFunction<T extends FunctionLike> = MockInstance<T> &
// MockedObject<T>;
// type MockedFunctionShallow<T extends FunctionLike> = MockInstance<T> & T;
// export type MockedObject<T extends object> = {
// [K in keyof T]: T[K] extends ClassLike
// ? MockedClass<T[K]>
// : T[K] extends FunctionLike
// ? MockedFunction<T[K]>
// : T[K] extends object
// ? MockedObject<T[K]>
// : T[K];
// } & T;
// type MockedObjectShallow<T extends object> = {
// [K in keyof T]: T[K] extends ClassLike
// ? MockedClass<T[K]>
// : T[K] extends FunctionLike
// ? MockedFunctionShallow<T[K]>
// : T[K];
// } & T;
// export type MockedShallow<T> = T extends ClassLike
// ? MockedClass<T>
// : T extends FunctionLike
// ? MockedFunctionShallow<T>
// : T extends object
// ? MockedObjectShallow<T>
// : T;
// export type MockFunctionMetadata<
// T = unknown,
// MetadataType = MockMetadataType,
// > = MockMetadata<T, MetadataType>;
// export type MockFunctionMetadataType = MockMetadataType;
type MockFunctionResult<T extends FunctionLike = UnknownFunction> =
| MockFunctionResultIncomplete
| MockFunctionResultReturn<T>
| MockFunctionResultThrow;
interface MockFunctionResultIncomplete {
type: "incomplete";
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
* Result of a single call to a mock function that has not yet completed.
* This occurs if you test the result from within the mock function itself,
* or from within a function that was called by the mock.
*/
export interface ClassLike {
new (...args: any): any;
}
export type ConstructorLikeKeys<T> = keyof {
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
};
// export const fn: <T extends FunctionLike = UnknownFunction>(
// implementation?: T | undefined,
// ) => Mock<T>;
export type FunctionLike = (...args: any) => any;
export type MethodLikeKeys<T> = keyof {
[K in keyof T as Required<T>[K] extends FunctionLike ? K : never]: T[K];
};
export interface Mock<T extends (...args: any[]) => any> extends MockInstance<T> {
(...args: Parameters<T>): ReturnType<T>;
}
value: undefined;
}
interface MockFunctionResultReturn<T extends FunctionLike = UnknownFunction> {
type: "return";
/**
* All the internal typings need is to be sure that we have an any-function.
* `FunctionLike` type ensures that and helps to constrain the type as well.
* The default of `UnknownFunction` makes sure that `any`s do not leak to the
* user side. For instance, calling `fn()` without implementation will return
* a mock of `(...args: Array<unknown>) => unknown` type. If implementation
* is provided, its typings are inferred correctly.
* Result of a single call to a mock function that returned.
*/
// export interface Mock<T extends FunctionLike = UnknownFunction>
// extends Function,
// MockInstance<T> {
// new (...args: Parameters<T>): ReturnType<T>;
// (...args: Parameters<T>): ReturnType<T>;
// }
value: ReturnType<T>;
}
// export type Mocked<T> = T extends ClassLike
// ? MockedClass<T>
// : T extends FunctionLike
// ? MockedFunction<T>
// : T extends object
// ? MockedObject<T>
// : T;
interface MockFunctionResultThrow {
type: "throw";
/**
* Result of a single call to a mock function that threw.
*/
value: unknown;
}
// export const mocked: {
// <T extends object>(
// source: T,
// options?: {
// shallow: false;
// },
// ): Mocked<T>;
// <T_1 extends object>(
// source: T_1,
// options: {
// shallow: true;
// },
// ): MockedShallow<T_1>;
// };
interface MockFunctionState<T extends FunctionLike = FunctionLike> {
/**
* List of the call arguments of all calls that have been made to the mock.
*/
calls: Array<Parameters<T>>;
/**
* List of all the object instances that have been instantiated from the mock.
*/
instances: Array<ReturnType<T>>;
/**
* List of all the function contexts that have been applied to calls to the mock.
*/
contexts: Array<ThisParameterType<T>>;
/**
* List of the call order indexes of the mock. Jest indexes the order of
* invocations of all mocks in a test file. The index starts at `1`.
*/
invocationCallOrder: number[];
/**
* List of the call arguments of the last call that was made to the mock.
* If the function was not called, it will be `undefined`.
*/
lastCall?: Parameters<T>;
/**
* List of the results of all calls that have been made to the mock.
*/
results: Array<MockFunctionResult<T>>;
}
// export type MockedClass<T extends ClassLike> = MockInstance<
// (...args: ConstructorParameters<T>) => Mocked<InstanceType<T>>
// > &
// MockedObject<T>;
export interface MockInstance<T extends FunctionLike = UnknownFunction> {
_isMockFunction: true;
_protoImpl: Function;
getMockImplementation(): T | undefined;
getMockName(): string;
mock: MockFunctionState<T>;
mockClear(): this;
mockReset(): this;
mockRestore(): void;
mockImplementation(fn: T): this;
mockImplementationOnce(fn: T): this;
withImplementation(fn: T, callback: () => Promise<unknown>): Promise<void>;
withImplementation(fn: T, callback: () => void): void;
mockName(name: string): this;
mockReturnThis(): this;
mockReturnValue(value: ReturnType<T>): this;
mockReturnValueOnce(value: ReturnType<T>): this;
mockResolvedValue(value: ResolveType<T>): this;
mockResolvedValueOnce(value: ResolveType<T>): this;
mockRejectedValue(value: RejectType<T>): this;
mockRejectedValueOnce(value: RejectType<T>): this;
}
// export type MockedFunction<T extends FunctionLike> = MockInstance<T> &
// MockedObject<T>;
// export type MockMetadata<T, MetadataType = MockMetadataType> = {
// ref?: number;
// members?: Record<string, MockMetadata<T>>;
// mockImpl?: T;
// name?: string;
// refID?: number;
// type?: MetadataType;
// value?: T;
// length?: number;
// };
// type MockedFunctionShallow<T extends FunctionLike> = MockInstance<T> & T;
// export type MockMetadataType =
// | "object"
// | "array"
// | "regexp"
// | "function"
// | "constant"
// | "collection"
// | "null"
// | "undefined";
// export type MockedObject<T extends object> = {
// [K in keyof T]: T[K] extends ClassLike
// ? MockedClass<T[K]>
// : T[K] extends FunctionLike
// ? MockedFunction<T[K]>
// : T[K] extends object
// ? MockedObject<T[K]>
// : T[K];
// } & T;
// export class ModuleMocker {
// private readonly _environmentGlobal;
// private _mockState;
// private _mockConfigRegistry;
// private _spyState;
// private _invocationCallCounter;
// /**
// * @see README.md
// * @param global Global object of the test environment, used to create
// * mocks
// */
// constructor(global: typeof globalThis);
// private _getSlots;
// private _ensureMockConfig;
// private _ensureMockState;
// private _defaultMockConfig;
// private _defaultMockState;
// private _makeComponent;
// private _createMockFunction;
// private _generateMock;
// /**
// * Check whether the given property of an object has been already replaced.
// */
// private _findReplacedProperty;
// /**
// * @see README.md
// * @param metadata Metadata for the mock in the schema returned by the
// * getMetadata method of this module.
// */
// generateFromMetadata<T>(metadata: MockMetadata<T>): Mocked<T>;
// /**
// * @see README.md
// * @param component The component for which to retrieve metadata.
// */
// getMetadata<T = unknown>(
// component: T,
// _refs?: Map<T, number>,
// ): MockMetadata<T> | null;
// isMockFunction<T extends FunctionLike = UnknownFunction>(
// fn: MockInstance<T>,
// ): fn is MockInstance<T>;
// isMockFunction<P extends Array<unknown>, R>(
// fn: (...args: P) => R,
// ): fn is Mock<(...args: P) => R>;
// isMockFunction(fn: unknown): fn is Mock<UnknownFunction>;
// fn<T extends FunctionLike = UnknownFunction>(implementation?: T): Mock<T>;
// private _attachMockImplementation;
// spyOn<
// T extends object,
// K extends PropertyLikeKeys<T>,
// A extends "get" | "set",
// >(
// object: T,
// methodKey: K,
// accessType: A,
// ): A extends "get"
// ? SpiedGetter<T[K]>
// : A extends "set"
// ? SpiedSetter<T[K]>
// : never;
// spyOn<
// T extends object,
// K extends ConstructorLikeKeys<T> | MethodLikeKeys<T>,
// V extends Required<T>[K],
// >(
// object: T,
// methodKey: K,
// ): V extends ClassLike | FunctionLike ? Spied<V> : never;
// private _spyOnProperty;
// replaceProperty<
// T extends object,
// K extends PropertyLikeKeys<T>,
// V extends T[K],
// >(object: T, propertyKey: K, value: V): Replaced<T[K]>;
// clearAllMocks(): void;
// resetAllMocks(): void;
// restoreAllMocks(): void;
// private _typeOf;
// mocked<T extends object>(
// source: T,
// options?: {
// shallow: false;
// },
// ): Mocked<T>;
// mocked<T extends object>(
// source: T,
// options: {
// shallow: true;
// },
// ): MockedShallow<T>;
// }
// type MockedObjectShallow<T extends object> = {
// [K in keyof T]: T[K] extends ClassLike
// ? MockedClass<T[K]>
// : T[K] extends FunctionLike
// ? MockedFunctionShallow<T[K]>
// : T[K];
// } & T;
export type PropertyLikeKeys<T> = Exclude<keyof T, ConstructorLikeKeys<T> | MethodLikeKeys<T>>;
// export type MockedShallow<T> = T extends ClassLike
// ? MockedClass<T>
// : T extends FunctionLike
// ? MockedFunctionShallow<T>
// : T extends object
// ? MockedObjectShallow<T>
// : T;
export type RejectType<T extends FunctionLike> = ReturnType<T> extends PromiseLike<any> ? unknown : never;
// export type MockFunctionMetadata<
// T = unknown,
// MetadataType = MockMetadataType,
// > = MockMetadata<T, MetadataType>;
export interface Replaced<T = unknown> {
/**
* Restore property to its original value known at the time of mocking.
*/
restore(): void;
/**
* Change the value of the property.
*/
replaceValue(value: T): this;
}
// export type MockFunctionMetadataType = MockMetadataType;
export function replaceProperty<
T extends object,
K_2 extends Exclude<
keyof T,
| keyof {
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
}
| keyof {
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike ? K_1 : never]: T[K_1];
}
>,
V extends T[K_2],
>(object: T, propertyKey: K_2, value: V): Replaced<T[K_2]>;
type MockFunctionResult<T extends FunctionLike = UnknownFunction> =
| MockFunctionResultIncomplete
| MockFunctionResultReturn<T>
| MockFunctionResultThrow;
export type ResolveType<T extends FunctionLike> = ReturnType<T> extends PromiseLike<infer U> ? U : never;
interface MockFunctionResultIncomplete {
type: "incomplete";
/**
* Result of a single call to a mock function that has not yet completed.
* This occurs if you test the result from within the mock function itself,
* or from within a function that was called by the mock.
*/
value: undefined;
}
export type Spied<T extends ClassLike | FunctionLike> = T extends ClassLike
? SpiedClass<T>
: T extends FunctionLike
? SpiedFunction<T>
: never;
interface MockFunctionResultReturn<T extends FunctionLike = UnknownFunction> {
type: "return";
/**
* Result of a single call to a mock function that returned.
*/
value: ReturnType<T>;
}
export type SpiedClass<T extends ClassLike = UnknownClass> = MockInstance<
(...args: ConstructorParameters<T>) => InstanceType<T>
>;
interface MockFunctionResultThrow {
type: "throw";
/**
* Result of a single call to a mock function that threw.
*/
value: unknown;
}
export type SpiedFunction<T extends FunctionLike = UnknownFunction> = MockInstance<
(...args: Parameters<T>) => ReturnType<T>
>;
interface MockFunctionState<T extends FunctionLike = FunctionLike> {
/**
* List of the call arguments of all calls that have been made to the mock.
*/
calls: Array<Parameters<T>>;
/**
* List of all the object instances that have been instantiated from the mock.
*/
instances: Array<ReturnType<T>>;
/**
* List of all the function contexts that have been applied to calls to the mock.
*/
contexts: Array<ThisParameterType<T>>;
/**
* List of the call order indexes of the mock. Jest indexes the order of
* invocations of all mocks in a test file. The index starts at `1`.
*/
invocationCallOrder: number[];
/**
* List of the call arguments of the last call that was made to the mock.
* If the function was not called, it will be `undefined`.
*/
lastCall?: Parameters<T>;
/**
* List of the results of all calls that have been made to the mock.
*/
results: Array<MockFunctionResult<T>>;
}
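// Example sketch of the state above: given `const fn = mock(() => 1); fn();`
// from bun:test, `fn.mock.calls.length` is 1 and `fn.mock.results[0]` is
// `{ type: "return", value: 1 }`.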
export type SpiedGetter<T> = MockInstance<() => T>;
export interface MockInstance<T extends FunctionLike = UnknownFunction> {
_isMockFunction: true;
_protoImpl: Function;
getMockImplementation(): T | undefined;
getMockName(): string;
mock: MockFunctionState<T>;
mockClear(): this;
mockReset(): this;
mockRestore(): void;
mockImplementation(fn: T): this;
mockImplementationOnce(fn: T): this;
withImplementation(fn: T, callback: () => Promise<unknown>): Promise<void>;
withImplementation(fn: T, callback: () => void): void;
mockName(name: string): this;
mockReturnThis(): this;
mockReturnValue(value: ReturnType<T>): this;
mockReturnValueOnce(value: ReturnType<T>): this;
mockResolvedValue(value: ResolveType<T>): this;
mockResolvedValueOnce(value: ResolveType<T>): this;
mockRejectedValue(value: RejectType<T>): this;
mockRejectedValueOnce(value: RejectType<T>): this;
}
export type SpiedSetter<T> = MockInstance<(arg: T) => void>;
// export type MockMetadata<T, MetadataType = MockMetadataType> = {
// ref?: number;
// members?: Record<string, MockMetadata<T>>;
// mockImpl?: T;
// name?: string;
// refID?: number;
// type?: MetadataType;
// value?: T;
// length?: number;
// };
export interface SpyInstance<T extends FunctionLike = UnknownFunction> extends MockInstance<T> {}
// export type MockMetadataType =
// | "object"
// | "array"
// | "regexp"
// | "function"
// | "constant"
// | "collection"
// | "null"
// | "undefined";
// export class ModuleMocker {
// private readonly _environmentGlobal;
// private _mockState;
// private _mockConfigRegistry;
// private _spyState;
// private _invocationCallCounter;
// /**
// * @see README.md
// * @param global Global object of the test environment, used to create
// * mocks
// */
// constructor(global: typeof globalThis);
// private _getSlots;
// private _ensureMockConfig;
// private _ensureMockState;
// private _defaultMockConfig;
// private _defaultMockState;
// private _makeComponent;
// private _createMockFunction;
// private _generateMock;
// /**
// * Check whether the given property of an object has been already replaced.
// */
// private _findReplacedProperty;
// /**
// * @see README.md
// * @param metadata Metadata for the mock in the schema returned by the
// * getMetadata method of this module.
// */
// generateFromMetadata<T>(metadata: MockMetadata<T>): Mocked<T>;
// /**
// * @see README.md
// * @param component The component for which to retrieve metadata.
// */
// getMetadata<T = unknown>(
// component: T,
// _refs?: Map<T, number>,
// ): MockMetadata<T> | null;
// isMockFunction<T extends FunctionLike = UnknownFunction>(
// fn: MockInstance<T>,
// ): fn is MockInstance<T>;
// isMockFunction<P extends Array<unknown>, R>(
// fn: (...args: P) => R,
// ): fn is Mock<(...args: P) => R>;
// isMockFunction(fn: unknown): fn is Mock<UnknownFunction>;
// fn<T extends FunctionLike = UnknownFunction>(implementation?: T): Mock<T>;
// private _attachMockImplementation;
// spyOn<
// T extends object,
// K extends PropertyLikeKeys<T>,
// A extends "get" | "set",
// >(
// object: T,
// methodKey: K,
// accessType: A,
// ): A extends "get"
// ? SpiedGetter<T[K]>
// : A extends "set"
// ? SpiedSetter<T[K]>
// : never;
// spyOn<
// T extends object,
// K extends ConstructorLikeKeys<T> | MethodLikeKeys<T>,
// V extends Required<T>[K],
// >(
// object: T,
// methodKey: K,
// ): V extends ClassLike | FunctionLike ? Spied<V> : never;
// private _spyOnProperty;
// replaceProperty<
// T extends object,
// K extends PropertyLikeKeys<T>,
// V extends T[K],
// >(object: T, propertyKey: K, value: V): Replaced<T[K]>;
// clearAllMocks(): void;
// resetAllMocks(): void;
// restoreAllMocks(): void;
// private _typeOf;
// mocked<T extends object>(
// source: T,
// options?: {
// shallow: false;
// },
// ): Mocked<T>;
// mocked<T extends object>(
// source: T,
// options: {
// shallow: true;
// },
// ): MockedShallow<T>;
// }
export type PropertyLikeKeys<T> = Exclude<keyof T, ConstructorLikeKeys<T> | MethodLikeKeys<T>>;
export type RejectType<T extends FunctionLike> = ReturnType<T> extends PromiseLike<any> ? unknown : never;
export interface Replaced<T = unknown> {
/**
* Restore property to its original value known at the time of mocking.
*/
restore(): void;
/**
* Change the value of the property.
*/
replaceValue(value: T): this;
}
export function replaceProperty<
export const spyOn: {
<
T extends object,
K_2 extends Exclude<
keyof T,
@@ -2120,70 +2157,32 @@ declare module "bun:test" {
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike ? K_1 : never]: T[K_1];
}
>,
V extends T[K_2],
>(object: T, propertyKey: K_2, value: V): Replaced<T[K_2]>;
V extends Required<T>[K_2],
A extends "set" | "get",
>(
object: T,
methodKey: K_2,
accessType: A,
): A extends "get" ? SpiedGetter<V> : A extends "set" ? SpiedSetter<V> : never;
<
T_1 extends object,
K_5 extends
| keyof {
[K_3 in keyof T_1 as Required<T_1>[K_3] extends ClassLike ? K_3 : never]: T_1[K_3];
}
| keyof {
[K_4 in keyof T_1 as Required<T_1>[K_4] extends FunctionLike ? K_4 : never]: T_1[K_4];
},
V_1 extends Required<T_1>[K_5],
>(
object: T_1,
methodKey: K_5,
): V_1 extends ClassLike | FunctionLike ? Spied<V_1> : never;
};
export type ResolveType<T extends FunctionLike> = ReturnType<T> extends PromiseLike<infer U> ? U : never;
export type Spied<T extends ClassLike | FunctionLike> = T extends ClassLike
? SpiedClass<T>
: T extends FunctionLike
? SpiedFunction<T>
: never;
export type SpiedClass<T extends ClassLike = UnknownClass> = MockInstance<
(...args: ConstructorParameters<T>) => InstanceType<T>
>;
export type SpiedFunction<T extends FunctionLike = UnknownFunction> = MockInstance<
(...args: Parameters<T>) => ReturnType<T>
>;
export type SpiedGetter<T> = MockInstance<() => T>;
export type SpiedSetter<T> = MockInstance<(arg: T) => void>;
export interface SpyInstance<T extends FunctionLike = UnknownFunction> extends MockInstance<T> {}
export const spyOn: {
<
T extends object,
K_2 extends Exclude<
keyof T,
| keyof {
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
}
| keyof {
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike ? K_1 : never]: T[K_1];
}
>,
V extends Required<T>[K_2],
A extends "set" | "get",
>(
object: T,
methodKey: K_2,
accessType: A,
): A extends "get" ? SpiedGetter<V> : A extends "set" ? SpiedSetter<V> : never;
<
T_1 extends object,
K_5 extends
| keyof {
[K_3 in keyof T_1 as Required<T_1>[K_3] extends ClassLike ? K_3 : never]: T_1[K_3];
}
| keyof {
[K_4 in keyof T_1 as Required<T_1>[K_4] extends FunctionLike ? K_4 : never]: T_1[K_4];
},
V_1 extends Required<T_1>[K_5],
>(
object: T_1,
methodKey: K_5,
): V_1 extends ClassLike | FunctionLike ? Spied<V_1> : never;
};
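// Example sketch: `const spy = spyOn(console, "log");` records subsequent
// `console.log("hi")` calls in `spy.mock.calls`; `spy.mockRestore()` puts the
// original method back.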
export interface UnknownClass {
new (...args: unknown[]): unknown;
}
export type UnknownFunction = (...args: unknown[]) => unknown;
export interface UnknownClass {
new (...args: unknown[]): unknown;
}
export type UnknownFunction = (...args: unknown[]) => unknown;
}

View File

@@ -404,9 +404,9 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
ls->s.timeout = 255;
ls->s.long_timeout = 255;
ls->s.flags.low_prio_state = 0;
ls->s.next = 0;
ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
ls->s.flags.is_paused = 0;
ls->s.next = 0;
us_internal_socket_context_link_listen_socket(context, ls);
ls->socket_ext_size = socket_ext_size;
@@ -737,10 +737,9 @@ struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_con
connect_socket->timeout = 255;
connect_socket->long_timeout = 255;
connect_socket->flags.low_prio_state = 0;
connect_socket->connect_state = NULL;
connect_socket->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
connect_socket->flags.is_paused = 0;
connect_socket->connect_state = NULL;
connect_socket->connect_next = NULL;
us_internal_socket_context_link_socket(context, connect_socket);
return connect_socket;

View File

@@ -1617,7 +1617,7 @@ struct us_socket_t *us_internal_ssl_socket_context_connect(
2, &context->sc, host, port, options,
sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) +
socket_ext_size, is_connecting);
if (*is_connecting && s) {
if (*is_connecting) {
us_internal_zero_ssl_data_for_connected_socket_before_onopen(s);
}

View File

@@ -466,6 +466,7 @@ void us_socket_local_address(int ssl, us_socket_r s, char *nonnull_arg buf, int
/* Bun extras */
struct us_socket_t *us_socket_pair(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR* fds);
struct us_socket_t *us_socket_from_fd(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR fd);
struct us_socket_t *us_socket_attach(int ssl, LIBUS_SOCKET_DESCRIPTOR client_fd, struct us_socket_context_t *ctx, int flags, int socket_ext_size);
struct us_socket_t *us_socket_wrap_with_tls(int ssl, us_socket_r s, struct us_bun_socket_context_options_t options, struct us_socket_events_t events, int socket_ext_size);
int us_socket_raw_write(int ssl, us_socket_r s, const char *data, int length, int msg_more);
struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_client, char* ip, int ip_length);

View File

@@ -276,6 +276,27 @@ struct us_socket_t *us_socket_detach(int ssl, struct us_socket_t *s) {
return s;
}
// This function is used for moving a socket between two different event loops
struct us_socket_t *us_socket_attach(int ssl, LIBUS_SOCKET_DESCRIPTOR client_fd, struct us_socket_context_t *ctx, int flags, int socket_ext_size) {
struct us_poll_t *accepted_p = us_create_poll(ctx->loop, 0, sizeof(struct us_socket_t) - sizeof(struct us_poll_t) + socket_ext_size);
us_poll_init(accepted_p, client_fd, POLL_TYPE_SOCKET);
us_poll_start(accepted_p, ctx->loop, flags);
struct us_socket_t *s = (struct us_socket_t *) accepted_p;
s->context = ctx;
s->timeout = 0;
s->flags.low_prio_state = 0;
/* We always use nodelay */
bsd_socket_nodelay(client_fd, 1);
us_internal_socket_context_link_socket(ctx, s);
if (ctx->on_open) ctx->on_open(s, 0, 0, 0);
return s;
}
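/* Usage sketch (hypothetical names): move a socket by detaching it from its old
 * loop, then attaching the raw fd to a context owned by the target loop:
 *
 *   struct us_socket_t *detached = us_socket_detach(0, s);
 *   LIBUS_SOCKET_DESCRIPTOR fd = us_poll_fd((struct us_poll_t *) detached);
 *   us_socket_attach(0, fd, target_ctx, LIBUS_SOCKET_READABLE, ext_size);
 */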
struct us_socket_t *us_socket_pair(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR* fds) {
#if defined(LIBUS_USE_LIBUV) || defined(WIN32)
return 0;
@@ -316,17 +337,21 @@ struct us_socket_t *us_socket_from_fd(struct us_socket_context_t *ctx, int socke
struct us_socket_t *s = (struct us_socket_t *) p1;
s->context = ctx;
s->timeout = 255;
s->long_timeout = 255;
s->timeout = 0;
s->long_timeout = 0;
s->flags.low_prio_state = 0;
s->flags.allow_half_open = 0;
s->flags.is_paused = 0;
s->connect_state = NULL;
s->flags.allow_half_open = 0;
/* We always use nodelay */
bsd_socket_nodelay(fd, 1);
apple_no_sigpipe(fd);
bsd_set_nonblocking(fd);
int flags = fcntl(fd, F_GETFL, 0);
if (flags != -1) {
flags |= O_NONBLOCK;
fcntl(fd, F_SETFL, flags);
}
us_internal_socket_context_link_socket(ctx, s);
return s;

View File

@@ -614,22 +614,13 @@ public:
httpContext->getSocketContextData()->onSocketClosed = onClose;
}
void setOnClientError(HttpContextData<SSL>::OnClientErrorCallback onClientError) {
httpContext->getSocketContextData()->onClientError = std::move(onClientError);
}
TemplatedApp &&run() {
uWS::run();
return std::move(*this);
}
TemplatedApp &&setUsingCustomExpectHandler(bool value) {
httpContext->getSocketContextData()->flags.usingCustomExpectHandler = value;
return std::move(*this);
}
TemplatedApp &&setRequireHostHeader(bool value) {
httpContext->getSocketContextData()->flags.requireHostHeader = value;
httpContext->getSocketContextData()->usingCustomExpectHandler = value;
return std::move(*this);
}

View File

@@ -31,7 +31,7 @@
#include <string_view>
#include <iostream>
#include "MoveOnlyFunction.h"
#include "HttpParser.h"
namespace uWS {
template<bool> struct HttpResponse;
@@ -73,8 +73,8 @@ private:
// if we are closing or already closed, we don't need to do anything
if (!us_socket_is_closed(SSL, s) && !us_socket_is_shut_down(SSL, s)) {
HttpContextData<SSL> *httpContextData = getSocketContextDataS(s);
httpContextData->flags.isSecure = success;
if(httpContextData->flags.rejectUnauthorized) {
if(httpContextData->rejectUnauthorized) {
if(!success || verify_error.error != 0) {
// we failed to handshake, close the socket
us_socket_close(SSL, s, 0, nullptr);
@@ -118,15 +118,8 @@ private:
/* Get socket ext */
auto *httpResponseData = reinterpret_cast<HttpResponseData<SSL> *>(us_socket_ext(SSL, s));
/* Call filter */
HttpContextData<SSL> *httpContextData = getSocketContextDataS(s);
if(httpContextData->flags.isParsingHttp) {
if(httpContextData->onClientError) {
httpContextData->onClientError(SSL, s,uWS::HTTP_PARSER_ERROR_INVALID_EOF, nullptr, 0);
}
}
for (auto &f : httpContextData->filterHandlers) {
f((HttpResponse<SSL> *) s, -1);
}
@@ -170,7 +163,7 @@ private:
((AsyncSocket<SSL> *) s)->cork();
/* Mark that we are inside the parser now */
httpContextData->flags.isParsingHttp = true;
httpContextData->isParsingHttp = true;
// clients need to know the cursor after http parse, not servers!
// how far did we read then? we need to know to continue with websocket parsing data? or?
@@ -181,7 +174,7 @@ private:
#endif
/* The return value is entirely up to us to interpret. The HttpParser cares only for whether the returned value is DIFFERENT from passed user */
auto [err, parserError, returnedSocket] = httpResponseData->consumePostPadded(httpContextData->flags.requireHostHeader,data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
auto [err, returnedSocket] = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
/* For every request we reset the timeout and hang until user makes action */
/* Warning: if we are in shutdown state, resetting the timer is a security issue! */
us_socket_timeout(SSL, (us_socket_t *) s, 0);
@@ -208,7 +201,6 @@ private:
httpResponseData->fromAncientRequest = httpRequest->isAncient();
/* Select the router based on SNI (only possible for SSL) */
auto *selectedRouter = &httpContextData->router;
if constexpr (SSL) {
@@ -298,12 +290,10 @@ private:
});
/* Mark that we are no longer parsing Http */
httpContextData->flags.isParsingHttp = false;
httpContextData->isParsingHttp = false;
/* If we got fullptr that means the parser wants us to close the socket from error (same as calling the errorHandler) */
if (returnedSocket == FULLPTR) {
if(httpContextData->onClientError) {
httpContextData->onClientError(SSL, s, parserError, data, length);
}
/* For errors, we only deliver them "at most once". We don't care if they get halfways delivered or not. */
us_socket_write(SSL, s, httpErrorResponses[err].data(), (int) httpErrorResponses[err].length(), false);
us_socket_shutdown(SSL, s);
@@ -477,7 +467,7 @@ public:
/* Init socket context data */
auto* httpContextData = new ((HttpContextData<SSL> *) us_socket_context_ext(SSL, (us_socket_context_t *) httpContext)) HttpContextData<SSL>();
if(options.request_cert && options.reject_unauthorized) {
httpContextData->flags.rejectUnauthorized = true;
httpContextData->rejectUnauthorized = true;
}
return httpContext->init();
}
@@ -525,15 +515,15 @@ public:
}
}
const bool &customContinue = httpContextData->usingCustomExpectHandler;
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets), httpContextData](auto *r) mutable {
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets), &customContinue](auto *r) mutable {
auto user = r->getUserData();
user.httpRequest->setYield(false);
user.httpRequest->setParameters(r->getParameters());
user.httpRequest->setParameterOffsets(&parameterOffsets);
if (!httpContextData->flags.usingCustomExpectHandler) {
if (!customContinue) {
/* Middleware? Automatically respond to expectations */
std::string_view expect = user.httpRequest->getHeader("expect");
if (expect.length() && expect == "100-continue") {

View File

@@ -22,19 +22,11 @@
#include <vector>
#include "MoveOnlyFunction.h"
#include "HttpParser.h"
namespace uWS {
template<bool> struct HttpResponse;
struct HttpRequest;
struct HttpFlags {
bool isParsingHttp: 1 = false;
bool rejectUnauthorized: 1 = false;
bool usingCustomExpectHandler: 1 = false;
bool requireHostHeader: 1 = true;
bool isSecure: 1 = false;
};
template <bool SSL>
struct alignas(16) HttpContextData {
template <bool> friend struct HttpContext;
@@ -43,7 +35,6 @@ struct alignas(16) HttpContextData {
private:
std::vector<MoveOnlyFunction<void(HttpResponse<SSL> *, int)>> filterHandlers;
using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
using OnClientErrorCallback = MoveOnlyFunction<void(int is_ssl, struct us_socket_t *rawSocket, uWS::HttpParserError errorCode, char *rawPacket, int rawPacketLength)>;
MoveOnlyFunction<void(const char *hostname)> missingServerNameHandler;
@@ -58,11 +49,12 @@ private:
/* This is the default router for default SNI or non-SSL */
HttpRouter<RouterData> router;
void *upgradedWebSocket = nullptr;
bool isParsingHttp = false;
bool rejectUnauthorized = false;
bool usingCustomExpectHandler = false;
/* Used to simulate Node.js socket events. */
OnSocketClosedCallback onSocketClosed = nullptr;
OnClientErrorCallback onClientError = nullptr;
HttpFlags flags;
// TODO: SNI
void clearRoutes() {
@@ -70,11 +62,6 @@ private:
this->currentRouter = &router;
filterHandlers.clear();
}
public:
bool isSecure() const {
return flags.isSecure;
}
};
}

View File

@@ -48,19 +48,6 @@ namespace uWS
static const unsigned int MINIMUM_HTTP_POST_PADDING = 32;
static void *FULLPTR = (void *)~(uintptr_t)0;
enum HttpParserError: uint8_t {
HTTP_PARSER_ERROR_NONE = 0,
HTTP_PARSER_ERROR_INVALID_CHUNKED_ENCODING = 1,
HTTP_PARSER_ERROR_INVALID_CONTENT_LENGTH = 2,
HTTP_PARSER_ERROR_INVALID_TRANSFER_ENCODING = 3,
HTTP_PARSER_ERROR_MISSING_HOST_HEADER = 4,
HTTP_PARSER_ERROR_INVALID_REQUEST = 5,
HTTP_PARSER_ERROR_REQUEST_HEADER_FIELDS_TOO_LARGE = 6,
HTTP_PARSER_ERROR_INVALID_HTTP_VERSION = 7,
HTTP_PARSER_ERROR_INVALID_EOF = 8,
HTTP_PARSER_ERROR_INVALID_METHOD = 9,
};
struct HttpRequest
{
@@ -72,8 +59,8 @@ namespace uWS
std::string_view key, value;
} headers[UWS_HTTP_MAX_HEADERS_COUNT];
bool ancientHttp;
bool didYield;
unsigned int querySeparator;
bool didYield;
BloomFilter bf;
std::pair<int, std::string_view *> currentParameters;
std::map<std::string, unsigned short, std::less<>> *currentParameterOffsets = nullptr;
@@ -147,7 +134,6 @@ namespace uWS
return std::string_view(nullptr, 0);
}
std::string_view getUrl()
{
return std::string_view(headers->value.data(), querySeparator);
@@ -326,18 +312,6 @@ namespace uWS
return (void *)p;
}
static bool isAlpha(std::string_view str) {
if (str.empty()) return false;
for (char c : str) {
if (!isAlphaChar(c))
return false;
}
return true;
}
static inline bool isAlphaChar(char c) {
return ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'));
}
static inline int isHTTPorHTTPSPrefixForProxies(char *data, char *end) {
// We can check 8 because:
// 1. If it's "http://" that's 7 bytes, and it's supposed to at least have a trailing slash.
@@ -383,13 +357,7 @@ namespace uWS
/* Scan until single SP, assume next is / (origin request) */
char *start = data;
/* This catches the post padded CR and fails */
while (data[0] > 32) {
if (!isAlphaChar(data[0])) {
return (char *) 0x3;
}
data++;
}
while (data[0] > 32) data++;
if (&data[1] == end) [[unlikely]] {
return nullptr;
}
@@ -397,9 +365,6 @@ namespace uWS
if (data[0] == 32 && (__builtin_expect(data[1] == '/', 1) || isHTTPorHTTPSPrefixForProxies(data + 1, end) == 1)) [[likely]] {
header.key = {start, (size_t) (data - start)};
data++;
if(!isAlpha(header.key)) {
return (char *) 0x3;
}
/* Scan for less than 33 (catches post padded CR and fails) */
start = data;
for (; true; data += 8) {
@@ -471,7 +436,7 @@ namespace uWS
}
/* End is only used for the proxy parser. The HTTP parser recognizes "\ra" as invalid "\r\n" scan and breaks. */
static unsigned int getHeaders(char *postPaddedBuffer, char *end, struct HttpRequest::Header *headers, void *reserved, unsigned int &err, HttpParserError &parserError, bool &isAncientHTTP) {
static unsigned int getHeaders(char *postPaddedBuffer, char *end, struct HttpRequest::Header *headers, void *reserved, unsigned int &err, bool &isAncientHTTP) {
char *preliminaryKey, *preliminaryValue, *start = postPaddedBuffer;
#ifdef UWS_WITH_PROXY
@@ -501,21 +466,15 @@ namespace uWS
* which is then removed, and our counters to flip due to overflow and we end up with a crash */
/* The request line is different from the field names / field values */
if ((char *) 4 > (postPaddedBuffer = consumeRequestLine(postPaddedBuffer, end, headers[0], isAncientHTTP))) {
if ((char *) 3 > (postPaddedBuffer = consumeRequestLine(postPaddedBuffer, end, headers[0], isAncientHTTP))) {
/* Error - invalid request line */
/* Assuming it is 505 HTTP Version Not Supported */
switch (reinterpret_cast<uintptr_t>(postPaddedBuffer)) {
case 0x1:
err = HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED;
parserError = HTTP_PARSER_ERROR_INVALID_HTTP_VERSION;
err = HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED;;
break;
case 0x2:
err = HTTP_ERROR_400_BAD_REQUEST;
parserError = HTTP_PARSER_ERROR_INVALID_REQUEST;
break;
case 0x3:
err = HTTP_ERROR_400_BAD_REQUEST;
parserError = HTTP_PARSER_ERROR_INVALID_METHOD;
break;
default: {
err = 0;
@@ -524,18 +483,6 @@ namespace uWS
}
return 0;
}
/* No request headers found */
size_t buffer_size = end - postPaddedBuffer;
if(buffer_size < 2) {
/* Fragmented request */
err = HTTP_ERROR_400_BAD_REQUEST;
parserError = HTTP_PARSER_ERROR_INVALID_REQUEST;
return 0;
}
if(buffer_size >= 2 && postPaddedBuffer[0] == '\r' && postPaddedBuffer[1] == '\n') {
/* No headers found */
return (unsigned int) ((postPaddedBuffer + 2) - start);
}
headers++;
for (unsigned int i = 1; i < UWS_HTTP_MAX_HEADERS_COUNT - 1; i++) {
@@ -552,7 +499,6 @@ namespace uWS
}
/* Error: invalid chars in field name */
err = HTTP_ERROR_400_BAD_REQUEST;
parserError = HTTP_PARSER_ERROR_INVALID_REQUEST;
return 0;
}
postPaddedBuffer++;
@@ -570,7 +516,6 @@ namespace uWS
}
/* Error - invalid chars in field value */
err = HTTP_ERROR_400_BAD_REQUEST;
parserError = HTTP_PARSER_ERROR_INVALID_REQUEST;
return 0;
}
break;
@@ -604,7 +549,6 @@ namespace uWS
/* \r\n\r plus non-\n letter is malformed request, or simply out of search space */
if (postPaddedBuffer + 1 < end) {
err = HTTP_ERROR_400_BAD_REQUEST;
parserError = HTTP_PARSER_ERROR_INVALID_REQUEST;
}
return 0;
}
@@ -624,26 +568,26 @@ namespace uWS
* or [consumed, nullptr] for "break; I am closed or upgraded to websocket"
* or [whatever, fullptr] for "break and close me, I am a parser error!" */
template <bool ConsumeMinimally>
std::tuple<unsigned int, HttpParserError, void *> fenceAndConsumePostPadded(bool requireHostHeader, char *data, unsigned int length, void *user, void *reserved, HttpRequest *req, MoveOnlyFunction<void *(void *, HttpRequest *)> &requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &dataHandler) {
std::pair<unsigned int, void *> fenceAndConsumePostPadded(char *data, unsigned int length, void *user, void *reserved, HttpRequest *req, MoveOnlyFunction<void *(void *, HttpRequest *)> &requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &dataHandler) {
/* How much data we CONSUMED (to throw away) */
unsigned int consumedTotal = 0;
unsigned int err = 0;
HttpParserError parserError = HTTP_PARSER_ERROR_NONE;
/* Fence two bytes past end of our buffer (buffer has post padded margins).
* This is to always catch scan for \r but not for \r\n. */
data[length] = '\r';
data[length + 1] = 'a'; /* Anything that is not \n, to trigger "invalid request" */
bool isAncientHTTP = false;
for (unsigned int consumed; length && (consumed = getHeaders(data, data + length, req->headers, reserved, err, parserError, isAncientHTTP)); ) {
for (unsigned int consumed; length && (consumed = getHeaders(data, data + length, req->headers, reserved, err, isAncientHTTP)); ) {
data += consumed;
length -= consumed;
consumedTotal += consumed;
/* Even if we could parse it, check for length here as well */
if (consumed > MAX_FALLBACK_SIZE) {
return {HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE, HTTP_PARSER_ERROR_REQUEST_HEADER_FIELDS_TOO_LARGE, FULLPTR};
return {HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE, FULLPTR};
}
/* Store HTTP version (ancient 1.0 or 1.1) */
@@ -651,13 +595,13 @@ namespace uWS
/* Add all headers to bloom filter */
req->bf.reset();
for (HttpRequest::Header *h = req->headers; (++h)->key.length(); ) {
req->bf.add(h->key);
}
/* Break if no host header (but we can have empty string which is different from nullptr) */
if (!isAncientHTTP && requireHostHeader && !req->getHeader("host").data()) {
return {HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_MISSING_HOST_HEADER, FULLPTR};
if (!req->getHeader("host").data()) {
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
/* RFC 9112 6.3
@@ -674,7 +618,7 @@ namespace uWS
/* Returning fullptr is the same as calling the errorHandler */
/* We could be smart and set an error in the context along with this, to indicate what
* http error response we might want to return */
return {HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_TRANSFER_ENCODING, FULLPTR};
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
/* Parse query */
@@ -686,17 +630,25 @@ namespace uWS
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
if (remainingStreamingBytes == UINT64_MAX) {
/* Parser error */
return {HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_CONTENT_LENGTH, FULLPTR};
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
}
// lets check if content len is valid before calling requestHandler
if(contentLengthStringLen) {
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
if (remainingStreamingBytes == UINT64_MAX) {
/* Parser error */
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
}
/* If returned socket is not what we put in we need
* to break here as we either have upgraded to
* WebSockets or otherwise closed the socket. */
void *returnedUser = requestHandler(user, req);
if (returnedUser != user) {
/* We are upgraded to WebSocket or otherwise broken */
return {consumedTotal, HTTP_PARSER_ERROR_NONE, returnedUser};
return {consumedTotal, returnedUser};
}
/* The rules at play here according to RFC 9112 for requests are essentially:
@@ -732,7 +684,7 @@ namespace uWS
}
if (isParsingInvalidChunkedEncoding(remainingStreamingBytes)) {
// TODO: what happen if we already responded?
return {HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_CHUNKED_ENCODING, FULLPTR};
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
unsigned int consumed = (length - (unsigned int) dataToConsume.length());
data = (char *) dataToConsume.data();
@@ -761,13 +713,13 @@ namespace uWS
}
/* Whenever we return FULLPTR, the interpretation of "consumed" should be the HttpError enum. */
if (err) {
return {err, parserError, FULLPTR};
return {err, FULLPTR};
}
return {consumedTotal, HTTP_PARSER_ERROR_NONE, user};
return {consumedTotal, user};
}
public:
std::tuple<unsigned int, HttpParserError, void *> consumePostPadded(bool requireHostHeader, char *data, unsigned int length, void *user, void *reserved, MoveOnlyFunction<void *(void *, HttpRequest *)> &&requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &&dataHandler) {
std::pair<unsigned int, void *> consumePostPadded(char *data, unsigned int length, void *user, void *reserved, MoveOnlyFunction<void *(void *, HttpRequest *)> &&requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &&dataHandler) {
/* This resets BloomFilter by construction, but later we also reset it again.
* Optimize this to skip resetting twice (req could be made global) */
@@ -781,7 +733,7 @@ public:
dataHandler(user, chunk, chunk.length() == 0);
}
if (isParsingInvalidChunkedEncoding(remainingStreamingBytes)) {
return {HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_CHUNKED_ENCODING, FULLPTR};
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
data = (char *) dataToConsume.data();
length = (unsigned int) dataToConsume.length();
@@ -791,7 +743,7 @@ public:
if (remainingStreamingBytes >= length) {
void *returnedUser = dataHandler(user, std::string_view(data, length), remainingStreamingBytes == length);
remainingStreamingBytes -= length;
return {0, HTTP_PARSER_ERROR_NONE, returnedUser};
return {0, returnedUser};
} else {
void *returnedUser = dataHandler(user, std::string_view(data, remainingStreamingBytes), true);
@@ -801,7 +753,7 @@ public:
remainingStreamingBytes = 0;
if (returnedUser != user) {
return {0, HTTP_PARSER_ERROR_NONE, returnedUser};
return {0, returnedUser};
}
}
}
@@ -816,19 +768,19 @@ public:
fallback.append(data, maxCopyDistance);
// break here on break
std::tuple<unsigned int, HttpParserError, void *> consumed = fenceAndConsumePostPadded<true>(requireHostHeader,fallback.data(), (unsigned int) fallback.length(), user, reserved, &req, requestHandler, dataHandler);
if (std::get<2>(consumed) != user) {
std::pair<unsigned int, void *> consumed = fenceAndConsumePostPadded<true>(fallback.data(), (unsigned int) fallback.length(), user, reserved, &req, requestHandler, dataHandler);
if (consumed.second != user) {
return consumed;
}
if (std::get<0>(consumed)) {
if (consumed.first) {
/* This logic assumes that we consumed everything in fallback buffer.
* This is critically important, as we will get an integer overflow in case
* of "had" being larger than what we consumed, and that we would drop data */
fallback.clear();
data += std::get<0>(consumed) - had;
length -= std::get<0>(consumed) - had;
data += consumed.first - had;
length -= consumed.first - had;
if (remainingStreamingBytes) {
/* It's either chunked or with a content-length */
@@ -838,7 +790,7 @@ public:
dataHandler(user, chunk, chunk.length() == 0);
}
if (isParsingInvalidChunkedEncoding(remainingStreamingBytes)) {
return {HTTP_ERROR_400_BAD_REQUEST, HTTP_PARSER_ERROR_INVALID_CHUNKED_ENCODING, FULLPTR};
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
}
data = (char *) dataToConsume.data();
length = (unsigned int) dataToConsume.length();
@@ -847,7 +799,7 @@ public:
if (remainingStreamingBytes >= (unsigned int) length) {
void *returnedUser = dataHandler(user, std::string_view(data, length), remainingStreamingBytes == (unsigned int) length);
remainingStreamingBytes -= length;
return {0, HTTP_PARSER_ERROR_NONE, returnedUser};
return {0, returnedUser};
} else {
void *returnedUser = dataHandler(user, std::string_view(data, remainingStreamingBytes), true);
@@ -857,7 +809,7 @@ public:
remainingStreamingBytes = 0;
if (returnedUser != user) {
return {0, HTTP_PARSER_ERROR_NONE, returnedUser};
return {0, returnedUser};
}
}
}
@@ -865,30 +817,30 @@ public:
} else {
if (fallback.length() == MAX_FALLBACK_SIZE) {
return {HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE, HTTP_PARSER_ERROR_REQUEST_HEADER_FIELDS_TOO_LARGE, FULLPTR};
return {HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE, FULLPTR};
}
return {0, HTTP_PARSER_ERROR_NONE, user};
return {0, user};
}
}
std::tuple<unsigned int, HttpParserError, void *> consumed = fenceAndConsumePostPadded<false>(requireHostHeader,data, length, user, reserved, &req, requestHandler, dataHandler);
if (std::get<2>(consumed) != user) {
std::pair<unsigned int, void *> consumed = fenceAndConsumePostPadded<false>(data, length, user, reserved, &req, requestHandler, dataHandler);
if (consumed.second != user) {
return consumed;
}
data += std::get<0>(consumed);
length -= std::get<0>(consumed);
data += consumed.first;
length -= consumed.first;
if (length) {
if (length < MAX_FALLBACK_SIZE) {
fallback.append(data, length);
} else {
return {HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE, HTTP_PARSER_ERROR_REQUEST_HEADER_FIELDS_TOO_LARGE, FULLPTR};
return {HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE, FULLPTR};
}
}
// added for now
return {0, HTTP_PARSER_ERROR_NONE, user};
return {0, user};
}
};

View File

@@ -331,7 +331,7 @@ public:
/* We should only mark this if inside the parser; if upgrading "async" we cannot set this */
HttpContextData<SSL> *httpContextData = httpContext->getSocketContextData();
if (httpContextData->flags.isParsingHttp) {
if (httpContextData->isParsingHttp) {
/* We need to tell the Http parser that we changed socket */
httpContextData->upgradedWebSocket = webSocket;
}
@@ -462,28 +462,6 @@ public:
return internalEnd({nullptr, 0}, 0, false, false, closeConnection);
}
void flushHeaders() {
writeStatus(HTTP_200_OK);
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WROTE_CONTENT_LENGTH_HEADER) && !httpResponseData->fromAncientRequest) {
if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
/* Write mark on first call to write */
writeMark();
writeHeader("Transfer-Encoding", "chunked");
Super::write("\r\n", 2);
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
}
} else if (!(httpResponseData->state & HttpResponseData<SSL>::HTTP_WRITE_CALLED)) {
writeMark();
Super::write("\r\n", 2);
httpResponseData->state |= HttpResponseData<SSL>::HTTP_WRITE_CALLED;
}
}
/* Write parts of the response in chunking fashion. Starts timeout if failed. */
bool write(std::string_view data, size_t *writtenPtr = nullptr) {
writeStatus(HTTP_200_OK);

View File

@@ -1,14 +0,0 @@
#!/bin/sh
if ! test -d build/debug/codegen; then
echo "Missing codegen"
exit 1
fi
out="codegen-for-zig-team.tar.gz"
tar --no-xattrs \
--exclude=".DS_Store" \
-zcf "$out" \
build/debug/codegen \
src/bun.js/bindings/GeneratedBindings.zig \
src/bun.js/bindings/GeneratedJS2Native.zig
echo "-> $out"

View File

@@ -7,7 +7,7 @@
// - It cannot use Bun APIs, since it is run using Node.js.
// - It does not import dependencies, so it's faster to start.
import { spawn, spawnSync } from "node:child_process";
import { spawn as nodeSpawn, spawnSync as nodeSpawnSync } from "node:child_process";
import { createHash } from "node:crypto";
import {
accessSync,
@@ -59,6 +59,7 @@ const spawnTimeout = 5_000;
const testTimeout = 3 * 60_000;
const integrationTimeout = 5 * 60_000;
const napiTimeout = 10 * 60_000;
const vendorTimeout = 10_000;
function getNodeParallelTestTimeout(testPath) {
if (testPath.includes("test-dns")) {
@@ -137,6 +138,10 @@ const { values: options, positionals: filters } = parseArgs({
type: "boolean",
default: isBuildkite,
},
["update-regressions"]: {
type: "boolean",
default: false,
},
},
});
@@ -200,8 +205,14 @@ async function runTests() {
*/
const runTest = async (title, fn) => {
const index = ++i;
const isVendor = title.startsWith("vendor/");
let result, failure, flaky;
/** @type {TestResult} */
let result;
/** @type {TestResult | undefined} */
let failure;
/** @type {boolean} */
let flaky;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
if (attempt > 1) {
await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
@@ -239,25 +250,95 @@ async function runTests() {
}
}
if (!failure) {
/** @type {TestEntry[]} */
const regressions = [];
/** @type {TestEntry[]} */
const regressionsFixed = [];
if (isVendor) {
const { tests, error, testPath } = result;
if (error && !error.endsWith("failing")) {
tests.push({ file: testPath, test: error, status: "fail" });
}
const errors = tests.filter(({ status }) => status === "fail").map(({ file, test }) => `${file} > ${test}`);
if (errors.length) {
const knownErrors =
vendorTests?.flatMap(({ testFailures }) => testFailures)?.filter(error => error.startsWith(testPath)) || [];
const regressedErrors = tests.filter(
({ status, file, test }) => status === "fail" && !knownErrors.includes(`${file} > ${test}`),
);
for (const error of regressedErrors) {
regressions.push(error);
}
const fixedErrors = knownErrors.filter(error => !errors.includes(error));
for (const error of fixedErrors) {
const [file, ...labels] = error.split(" > ");
regressionsFixed.push({
file,
test: labels.join(" > "),
status: "pass",
});
}
}
if (options["update-regressions"]) {
const testFailures = regressions.map(({ file, test }) => `${file} > ${test}`);
const vendorPath = join(cwd, "test", "vendor.json");
/** @type {Vendor[]} */
const vendorJson = JSON.parse(readFileSync(vendorPath, "utf-8"));
const [, vendorName] = title.split("/");
const vendorPackage = vendorJson.find(({ package: name }) => name === vendorName);
if (vendorPackage) {
const existingTestFailures = vendorPackage.testFailures || [];
const updatedTestFailures = Array.from(
new Set([...existingTestFailures.filter(error => !error.startsWith(title)), ...testFailures]),
).sort();
const vendorIndex = vendorJson.findIndex(({ package: name }) => name === vendorName);
const vendorUpdatedPackage = {
...vendorPackage,
testFailures: updatedTestFailures?.length ? updatedTestFailures : undefined,
};
if (vendorIndex === -1) {
vendorJson.push(vendorUpdatedPackage);
} else {
vendorJson[vendorIndex] = vendorUpdatedPackage;
}
writeFileSync(vendorPath, JSON.stringify(vendorJson, null, 2));
}
}
}
if (isBuildkite && regressionsFixed.length) {
const content = formatTestToMarkdown(
{ ...result, tests: regressionsFixed, testPath: title },
{ includeOk: true },
);
reportAnnotationToBuildKite({ context: "regressions", label: title, content, style: "info" });
}
if (!failure || (isVendor && !regressions.length)) {
return result;
}
if (isBuildkite) {
// Group flaky tests together, regardless of the title
const context = flaky ? "flaky" : title;
const style = flaky || title.startsWith("vendor") ? "warning" : "error";
const context = flaky && !isVendor ? "flaky" : title;
const style = flaky || isVendor ? "warning" : "error";
if (title.startsWith("vendor")) {
const content = formatTestToMarkdown({ ...failure, testPath: title });
if (content) {
reportAnnotationToBuildKite({ context, label: title, content, style });
let content;
if (isVendor) {
if (regressions.length) {
content = formatTestToMarkdown({ ...failure, tests: regressions, testPath: title });
} else {
content = formatTestToMarkdown({ ...failure, testPath: title });
}
} else {
const content = formatTestToMarkdown(failure);
if (content) {
reportAnnotationToBuildKite({ context, label: title, content, style });
}
content = formatTestToMarkdown(failure);
}
if (content) {
reportAnnotationToBuildKite({ context, label: title, content, style });
}
}
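
The hunk above is the heart of the vendor regression bookkeeping: each vendor run's failures are diffed against the `testFailures` baseline loaded from `test/vendor.json`, failures missing from the baseline become `regressions`, and baseline entries that no longer fail become `regressionsFixed`. A minimal sketch of that set arithmetic, using made-up test data (the real code derives `knownErrors` from the loaded `vendorTests`):

```js
// Sketch of the regression diffing above; data shapes follow the diff, inputs are hypothetical.
const knownErrors = [
  "test/app.test.js > handles routing",
  "test/app.test.js > parses query strings",
];
const tests = [
  { file: "test/app.test.js", test: "handles routing", status: "pass" },
  { file: "test/app.test.js", test: "parses query strings", status: "fail" },
  { file: "test/app.test.js", test: "streams bodies", status: "fail" },
];

const errors = tests.filter(({ status }) => status === "fail").map(({ file, test }) => `${file} > ${test}`);

// A failure not present in the baseline is a regression...
const regressions = tests.filter(
  ({ status, file, test }) => status === "fail" && !knownErrors.includes(`${file} > ${test}`),
);

// ...and a baseline entry that no longer fails has been fixed.
const regressionsFixed = knownErrors
  .filter(error => !errors.includes(error))
  .map(error => {
    const [file, ...labels] = error.split(" > ");
    return { file, test: labels.join(" > "), status: "pass" };
  });

console.log(regressions.map(t => t.test));      // ["streams bodies"]
console.log(regressionsFixed.map(t => t.test)); // ["handles routing"]
```

With `--update-regressions`, the current regression list is then written back into the matching package's `testFailures` array in `test/vendor.json`, so known failures stop failing the build while new ones still do.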
@@ -267,7 +348,7 @@ async function runTests() {
const longMarkdown = formatTestToMarkdown(failure);
appendFileSync(summaryPath, longMarkdown);
}
const shortMarkdown = formatTestToMarkdown(failure, true);
const shortMarkdown = formatTestToMarkdown(failure, { concise: true });
appendFileSync("comment.md", shortMarkdown);
}
@@ -556,7 +637,7 @@ async function spawnSafe(options) {
};
await new Promise(resolve => {
try {
subprocess = spawn(command, args, {
subprocess = nodeSpawn(command, args, {
stdio: ["ignore", "pipe", "pipe"],
timeout,
cwd,
@@ -667,6 +748,19 @@ async function spawnSafe(options) {
};
}
/**
* @param {SpawnOptions} options
* @returns {Promise<SpawnResult>}
*/
async function spawn(options) {
const result = await spawnSafe(options);
const { error, stdout } = result;
if (error) {
throw new Error(`Command failed: ${error}`, { cause: stdout });
}
return result;
}
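
The new `spawn` helper wraps `spawnSafe` and converts a non-empty `error` into a thrown exception with `stdout` attached as the `cause`, which is why the git clone/fetch calls above no longer inspect the result. A hypothetical call site (the tag and path are made up):

```js
// Rejects with `Command failed: ...` instead of returning an error field,
// so callers can rely on plain try/catch; `cause` carries the captured stdout.
try {
  await spawn({
    command: "git",
    args: ["fetch", "--depth", "1", "origin", "tag", "v1.0.0"],
    timeout: 60_000,
    cwd: "vendor/example",
  });
} catch (error) {
  console.error(error.message, error.cause);
}
```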
/**
* @param {string} execPath Path to bun binary
* @param {SpawnOptions} options
@@ -769,10 +863,11 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
* @returns {Promise<TestResult>}
*/
async function spawnBunTest(execPath, testPath, options = { cwd }) {
const timeout = getTestTimeout(testPath);
const isVendor = options.cwd?.includes("vendor");
const timeout = isVendor ? vendorTimeout : getTestTimeout(testPath);
const perTestTimeout = Math.ceil(timeout / 2);
const absPath = join(options["cwd"], testPath);
const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor");
const isReallyTest = isTestStrict(testPath) || isVendor;
const args = options["args"] ?? [];
const testArgs = ["test", ...args, `--timeout=${perTestTimeout}`];
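
Vendor suites now get the flat `vendorTimeout` of 10 seconds per test file instead of the path-based timeout, and as before each individual test is allotted half of the file budget via `--timeout`. Concretely:

```js
// For a vendor test file (cwd contains "vendor"):
const timeout = 10_000;                        // vendorTimeout
const perTestTimeout = Math.ceil(timeout / 2); // 5_000
// ...so the runner invokes: bun test --timeout=5000 <testPath>
```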
@@ -1072,9 +1167,6 @@ function isHidden(path) {
return /node_modules|node.js/.test(dirname(path)) || /^\./.test(basename(path));
}
/** Files with these extensions are not treated as test cases */
const IGNORED_EXTENSIONS = new Set([".md"]);
/**
* @param {string} cwd
* @returns {string[]}
@@ -1084,9 +1176,8 @@ function getTests(cwd) {
const dirname = join(cwd, path);
for (const entry of readdirSync(dirname, { encoding: "utf-8", withFileTypes: true })) {
const { name } = entry;
const ext = name.slice(name.lastIndexOf("."));
const filename = join(path, name);
if (isHidden(filename) || IGNORED_EXTENSIONS.has(ext)) {
if (isHidden(filename)) {
continue;
}
if (entry.isFile() && isTest(filename)) {
@@ -1105,10 +1196,9 @@ function getTests(cwd) {
* @property {string} repository
* @property {string} tag
* @property {string} [packageManager]
* @property {string} [testPath]
* @property {string} [testRunner]
* @property {string[]} [testExtensions]
* @property {boolean | Record<string, boolean | string>} [skipTests]
* @property {string[]} [testPatterns]
* @property {string[]} [testFailures]
*/
/**
@@ -1117,6 +1207,7 @@ function getTests(cwd) {
* @property {string} packageManager
* @property {string} testRunner
* @property {string[]} testPaths
* @property {string[]} testFailures
*/
/**
@@ -1151,73 +1242,83 @@ async function getVendorTests(cwd) {
return Promise.all(
relevantVendors.map(
async ({ package: name, repository, tag, testPath, testExtensions, testRunner, packageManager, skipTests }) => {
async ({ package: name, repository, tag, testPatterns, testFailures, testRunner, packageManager }) => {
const vendorPath = join(cwd, "vendor", name);
if (!existsSync(vendorPath)) {
await spawnSafe({
await spawn({
command: "git",
args: ["clone", "--depth", "1", "--single-branch", repository, vendorPath],
timeout: testTimeout,
cwd,
});
}
await spawnSafe({
command: "git",
args: ["fetch", "--depth", "1", "origin", "tag", tag],
timeout: testTimeout,
cwd: vendorPath,
});
await spawn({
command: "git",
args: ["fetch", "--depth", "1", "origin", "tag", tag],
timeout: testTimeout,
cwd: vendorPath,
});
}
const packageJsonPath = join(vendorPath, "package.json");
if (!existsSync(packageJsonPath)) {
throw new Error(`Vendor '${name}' does not have a package.json: ${packageJsonPath}`);
}
const testPathPrefix = testPath || "test";
const testParentPath = join(vendorPath, testPathPrefix);
if (!existsSync(testParentPath)) {
throw new Error(`Vendor '${name}' does not have a test directory: ${testParentPath}`);
}
const isTest = path => {
if (!isJavaScriptTest(path)) {
return false;
}
if (typeof skipTests === "boolean") {
return !skipTests;
}
if (typeof skipTests === "object") {
for (const [glob, reason] of Object.entries(skipTests)) {
const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`);
if (pattern.test(path) && reason) {
return false;
}
const testParentPaths = new Set();
if (testPatterns?.length) {
for (const pattern of testPatterns) {
const endOfPath = pattern.lastIndexOf("/");
const endOfGlob = pattern.lastIndexOf("*");
if (endOfPath === -1 || endOfGlob < endOfPath) {
continue;
}
const testPath = pattern.substring(0, endOfPath);
if (existsSync(join(vendorPath, testPath))) {
testParentPaths.add(testPath);
}
}
} else if (existsSync(join(vendorPath, "test"))) {
testParentPaths.add("test");
}
return true;
if (!testParentPaths.size) {
throw new Error(
`Could not find test directory for vendor '${name}' (hint: set 'testPatterns' in vendor.json)`,
);
}
const isMatch = (filename, glob) => {
const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`);
return pattern.test(filename);
};
const testPaths = readdirSync(testParentPath, { encoding: "utf-8", recursive: true })
.filter(filename =>
testExtensions ? testExtensions.some(ext => filename.endsWith(`.${ext}`)) : isTest(filename),
)
.map(filename => join(testPathPrefix, filename))
.filter(
filename =>
!filters?.length ||
filters.some(filter => join(vendorPath, filename).replace(/\\/g, "/").includes(filter)),
);
const testPaths = Array.from(testParentPaths).flatMap(testParentPath =>
readdirSync(join(vendorPath, testParentPath), { encoding: "utf-8", recursive: true })
.filter(filename =>
testPatterns?.length
? testPatterns.some(pattern => isMatch(join(testParentPath, filename).replace(/\\/g, "/"), pattern))
: isJavaScriptTest(filename),
)
.map(filename => join(testParentPath, filename))
.filter(
filename =>
!filters?.length ||
filters.some(filter => join(vendorPath, filename).replace(/\\/g, "/").includes(filter)),
),
);
if (!testPaths.length && !filters?.length) {
throw new Error(`Could not find test files for vendor '${name}' (hint: set 'testPatterns' in vendor.json)`);
}
return {
cwd: vendorPath,
packageManager: packageManager || "bun",
testRunner: testRunner || "bun",
testPaths,
testFailures: testFailures || [],
};
},
),
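
`getVendorTests` now derives the directories to scan from `testPatterns` rather than a single `testPath`: each pattern's path prefix (everything before the last `/`, provided the last `*` comes after it) names a parent directory, and files under it are kept when they match the glob. A standalone sketch of that matching, using a hypothetical `vendor.json` pattern:

```js
// Same glob semantics as the diff: "*" is the only wildcard, expanded to ".*".
const isMatch = (filename, glob) => {
  const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`);
  return pattern.test(filename);
};

const pattern = "test/unit/*.test.js"; // hypothetical testPatterns entry

// Directory prefix: everything before the last "/", kept only when the last "*"
// appears after it (otherwise the prefix itself contains a glob and is skipped).
const endOfPath = pattern.lastIndexOf("/");
const endOfGlob = pattern.lastIndexOf("*");
const parentDir = endOfPath !== -1 && endOfGlob > endOfPath ? pattern.substring(0, endOfPath) : null;
console.log(parentDir); // "test/unit"

console.log(isMatch("test/unit/router.test.js", pattern));   // true
console.log(isMatch("test/unit/fixtures/data.js", pattern)); // false
```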
@@ -1319,7 +1420,7 @@ function getExecPath(bunExe) {
let execPath;
let error;
try {
const { error, stdout } = spawnSync(bunExe, ["--print", "process.argv[0]"], {
const { error, stdout } = nodeSpawnSync(bunExe, ["--print", "process.argv[0]"], {
encoding: "utf-8",
timeout: spawnTimeout,
env: {
@@ -1405,7 +1506,7 @@ async function getExecPathFromBuildKite(target, buildId) {
*/
function getRevision(execPath) {
try {
const { error, stdout } = spawnSync(execPath, ["--revision"], {
const { error, stdout } = nodeSpawnSync(execPath, ["--revision"], {
encoding: "utf-8",
timeout: spawnTimeout,
env: {
@@ -1443,10 +1544,13 @@ function getTestLabel() {
/**
* @param {TestResult | TestResult[]} result
* @param {boolean} concise
* @param {object} options
* @param {boolean} options.concise
* @param {boolean} options.includeOk
* @returns {string}
*/
function formatTestToMarkdown(result, concise) {
function formatTestToMarkdown(result, options = {}) {
const { concise = false, includeOk = false } = options;
const results = Array.isArray(result) ? result : [result];
const buildLabel = getTestLabel();
const buildUrl = getBuildUrl();
@@ -1454,7 +1558,7 @@ function formatTestToMarkdown(result, concise) {
let markdown = "";
for (const { testPath, ok, tests, error, stdoutPreview: stdout } of results) {
if (ok || error === "SIGTERM") {
if ((ok && !includeOk) || error === "SIGTERM") {
continue;
}
@@ -1486,6 +1590,8 @@ function formatTestToMarkdown(result, concise) {
}
if (error) {
markdown += ` - ${error}`;
} else if (ok) {
markdown += ` - ok`;
}
if (platform) {
markdown += ` on ${platform}`;
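
`formatTestToMarkdown` trades its positional `concise` boolean for an options object, making room for the new `includeOk` flag that lets fixed regressions (which are `ok`) render instead of being skipped by the early `continue`. The call sites shown elsewhere in the diff change accordingly:

```js
// Before: a bare boolean at the call site.
formatTestToMarkdown(failure, true);

// After: named options, with the new flag available where needed.
formatTestToMarkdown(failure, { concise: true });
formatTestToMarkdown({ ...result, tests: regressionsFixed, testPath: title }, { includeOk: true });
```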
@@ -1513,7 +1619,7 @@ function formatTestToMarkdown(result, concise) {
* @param {string} glob
*/
function uploadArtifactsToBuildKite(glob) {
spawn("buildkite-agent", ["artifact", "upload", glob], {
nodeSpawnSync("buildkite-agent", ["artifact", "upload", glob], {
stdio: ["ignore", "ignore", "ignore"],
timeout: spawnTimeout,
cwd,
@@ -1538,7 +1644,7 @@ function listArtifactsFromBuildKite(glob, step) {
if (step) {
args.push("--step", step);
}
const { error, status, signal, stdout, stderr } = spawnSync("buildkite-agent", args, {
const { error, status, signal, stdout, stderr } = nodeSpawnSync("buildkite-agent", args, {
stdio: ["ignore", "ignore", "ignore"],
encoding: "utf-8",
timeout: spawnTimeout,
@@ -2006,10 +2112,13 @@ export async function main() {
// It also appears to hang on 1.1.1.1, which could explain this issue:
// https://github.com/oven-sh/bun/issues/11136
if (isWindows && isCI) {
await spawn("pwsh", [
"-Command",
"Set-DnsClientServerAddress -InterfaceAlias 'Ethernet 4' -ServerAddresses ('8.8.8.8','8.8.4.4')",
]);
await spawn({
command: "pwsh",
args: [
"-Command",
"Set-DnsClientServerAddress -InterfaceAlias 'Ethernet 4' -ServerAddresses ('8.8.8.8','8.8.4.4')",
],
});
}
const results = await runTests();

View File

@@ -5,7 +5,7 @@ const Output = @import("output.zig");
const use_mimalloc = bun.use_mimalloc;
const StringTypes = @import("./string_types.zig");
const Mimalloc = bun.Mimalloc;
const bun = @import("bun");
const bun = @import("root").bun;
const version_string = Environment.version_string;
@@ -123,10 +123,7 @@ pub fn exit(code: u32) noreturn {
Bun__onExit();
std.os.windows.kernel32.ExitProcess(code);
},
else => {
bun.c.quick_exit(@bitCast(code));
std.c.abort(); // quick_exit should be noreturn
},
else => bun.C.quick_exit(@bitCast(code)),
}
}

View File

@@ -1,5 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const ImportRecord = @import("./import_record.zig").ImportRecord;
const ImportKind = @import("./import_record.zig").ImportKind;

View File

@@ -24,7 +24,7 @@
const std = @import("std");
const builtin = @import("builtin");
const bun = @import("bun");
const bun = @import("root").bun;
const assert = bun.assert;
const testing = std.testing;
const Thread = std.Thread;

View File

@@ -46,10 +46,10 @@ pub const FileOperation = struct {
close_handle_on_complete: bool = false,
autowatch: bool = true,
pub fn fromFile(fd: bun.FD, pathname: string) FileOperation {
pub fn fromFile(fd: anytype, pathname: string) FileOperation {
return .{
.fd = fd,
.pathname = pathname,
.fd = bun.toFD(fd),
};
}
@@ -128,7 +128,7 @@ pub const SavedFile = struct {
const store = JSC.WebCore.Blob.Store.initFile(
JSC.Node.PathOrFileDescriptor{
.path = JSC.Node.PathLike{
.string = bun.PathString.init(path),
.string = JSC.PathString.init(path),
},
},
mime_type,
@@ -166,7 +166,7 @@ pub fn initFile(file: std.fs.File, pathname: string, size: usize) OutputFile {
pub fn initFileWithDir(file: std.fs.File, pathname: string, size: usize, dir: std.fs.Dir) OutputFile {
var res = initFile(file, pathname, size);
res.value.copy.dir_handle = .fromStdDir(dir);
res.value.copy.dir_handle = bun.toFD(dir.fd);
return res;
}
@@ -220,8 +220,8 @@ pub fn init(options: Options) OutputFile {
.buffer => |buffer| Value{ .buffer = .{ .allocator = buffer.allocator, .bytes = buffer.data } },
.file => |file| Value{
.copy = brk: {
var op = FileOperation.fromFile(.fromStdFile(file.file), options.output_path);
op.dir = .fromStdDir(file.dir);
var op = FileOperation.fromFile(file.file.handle, options.output_path);
op.dir = bun.toFD(file.dir.fd);
break :brk op;
},
},
@@ -251,7 +251,7 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u
}
var path_buf: bun.PathBuffer = undefined;
_ = try JSC.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
_ = try JSC.Node.NodeFS.writeFileWithPathBuffer(&path_buf, .{
.data = .{ .buffer = .{
.buffer = .{
.ptr = @constCast(value.bytes.ptr),
@@ -261,32 +261,36 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u
} },
.encoding = .buffer,
.mode = if (f.is_executable) 0o755 else 0o644,
.dirfd = .fromStdDir(root_dir),
.dirfd = bun.toFD(root_dir.fd),
.file = .{ .path = .{
.string = bun.PathString.init(rel_path),
.string = JSC.PathString.init(rel_path),
} },
}).unwrap();
},
.move => |value| {
try f.moveTo(root_dir_path, value.pathname, .fromStdDir(root_dir));
try f.moveTo(root_dir_path, value.pathname, bun.toFD(root_dir.fd));
},
.copy => |value| {
try f.copyTo(root_dir_path, value.pathname, .fromStdDir(root_dir));
try f.copyTo(root_dir_path, value.pathname, bun.toFD(root_dir.fd));
},
.pending => unreachable,
}
}
pub fn moveTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void {
try bun.sys.moveFileZ(file.value.move.dir, bun.sliceTo(&(try std.posix.toPosixPath(file.value.move.getPathname())), 0), dir, bun.sliceTo(&(try std.posix.toPosixPath(rel_path)), 0));
try bun.C.moveFileZ(file.value.move.dir, bun.sliceTo(&(try std.posix.toPosixPath(file.value.move.getPathname())), 0), dir, bun.sliceTo(&(try std.posix.toPosixPath(rel_path)), 0));
}
pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void {
const fd_out = bun.FD.fromStdFile(try dir.stdDir().createFile(rel_path, .{}));
const file_out = (try dir.asDir().createFile(rel_path, .{}));
const fd_out = file_out.handle;
var do_close = false;
const fd_in = bun.FD.fromStdFile(try std.fs.openFileAbsolute(file.src_path.text, .{ .mode = .read_only }));
const fd_in = (try std.fs.openFileAbsolute(file.src_path.text, .{ .mode = .read_only })).handle;
if (Environment.isWindows) {
Fs.FileSystem.setMaxFd(fd_out);
Fs.FileSystem.setMaxFd(fd_in);
do_close = Fs.FileSystem.instance.fs.needToCloseFiles();
// use paths instead of bun.getFdPathW()
@@ -295,8 +299,8 @@ pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: Fil
defer {
if (do_close) {
fd_out.close();
fd_in.close();
_ = bun.sys.close(bun.toFD(fd_out));
_ = bun.sys.close(bun.toFD(fd_in));
}
}
@@ -313,7 +317,7 @@ pub fn toJS(
.noop => JSC.JSValue.undefined,
.copy => |copy| brk: {
const file_blob = JSC.WebCore.Blob.Store.initFile(
if (copy.fd.isValid())
if (copy.fd != .zero)
JSC.Node.PathOrFileDescriptor{
.fd = copy.fd,
}
@@ -417,7 +421,7 @@ pub fn toBlob(
.noop => @panic("Cannot convert noop output file to blob"),
.copy => |copy| brk: {
const file_blob = try JSC.WebCore.Blob.Store.initFile(
if (copy.fd.isValid())
if (copy.fd != .zero)
JSC.Node.PathOrFileDescriptor{
.fd = copy.fd,
}
@@ -483,7 +487,7 @@ const string = []const u8;
const FileDescriptorType = bun.FileDescriptor;
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const JSC = bun.JSC;
const Fs = bun.fs;
const Loader = @import("./options.zig").Loader;

View File

@@ -20,7 +20,7 @@ const windows = std.os.windows;
const testing = std.testing;
const assert = bun.assert;
const Progress = @This();
const bun = @import("bun");
const bun = @import("root").bun;
/// `null` if the current node (and its children) should
/// not print on update()

View File

@@ -1,7 +1,7 @@
//! Originally, we tried using LIEF to inject the module graph into a MachO segment
//! But this incurred a fixed 350ms overhead on every build, which is unacceptable
//! so we give up on codesigning support on macOS for now until we can find a better solution
const bun = @import("bun");
const bun = @import("root").bun;
const std = @import("std");
const Schema = bun.Schema.Api;
const strings = bun.strings;
@@ -136,7 +136,7 @@ pub const StandaloneModuleGraph = struct {
loader: bun.options.Loader,
contents: [:0]const u8 = "",
sourcemap: LazySourceMap,
cached_blob: ?*bun.webcore.Blob = null,
cached_blob: ?*bun.JSC.WebCore.Blob = null,
encoding: Encoding = .binary,
wtf_string: bun.String = bun.String.empty,
bytecode: []u8 = "",
@@ -171,13 +171,13 @@ pub const StandaloneModuleGraph = struct {
return this.wtf_string.dupeRef();
}
pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.webcore.Blob {
pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.JSC.WebCore.Blob {
if (this.cached_blob == null) {
const store = bun.webcore.Blob.Store.init(@constCast(this.contents), bun.default_allocator);
const store = bun.JSC.WebCore.Blob.Store.init(@constCast(this.contents), bun.default_allocator);
// make it never free
store.ref();
const b = bun.webcore.Blob.initWithStore(store, globalObject).new();
const b = bun.JSC.WebCore.Blob.initWithStore(store, globalObject).new();
b.allocator = bun.default_allocator;
if (bun.http.MimeType.byExtensionNoDefault(bun.strings.trimLeadingChar(std.fs.path.extension(this.name), '.'))) |mime| {
@@ -486,7 +486,7 @@ pub const StandaloneModuleGraph = struct {
// Make the file writable so we can delete it
_ = Syscall.fchmod(fd, 0o777);
}
fd.close();
_ = Syscall.close(fd);
_ = Syscall.unlink(name);
}
}.toClean;
@@ -579,7 +579,7 @@ pub const StandaloneModuleGraph = struct {
}
unreachable;
};
const self_fd: bun.FileDescriptor = brk2: {
const self_fd = brk2: {
for (0..3) |retry| {
switch (Syscall.open(self_exe, bun.O.CLOEXEC | bun.O.RDONLY, 0)) {
.result => |res| break :brk2 res,
@@ -601,9 +601,9 @@ pub const StandaloneModuleGraph = struct {
unreachable;
};
defer self_fd.close();
defer _ = Syscall.close(self_fd);
bun.copyFile(self_fd, fd).unwrap() catch |err| {
bun.copyFile(self_fd.cast(), fd.cast()).unwrap() catch |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
cleanup(zname, fd);
Global.exit(1);
@@ -659,7 +659,7 @@ pub const StandaloneModuleGraph = struct {
Global.exit(1);
};
if (comptime !Environment.isWindows) {
_ = bun.c.fchmod(cloned_executable_fd.native(), 0o777);
_ = bun.C.fchmod(cloned_executable_fd.int(), 0o777);
}
return cloned_executable_fd;
},
@@ -727,7 +727,7 @@ pub const StandaloneModuleGraph = struct {
// the final 8 bytes in the file are the length of the module graph with padding, excluding the trailer and offsets
_ = Syscall.write(cloned_executable_fd, std.mem.asBytes(&total_byte_count));
if (comptime !Environment.isWindows) {
_ = bun.c.fchmod(cloned_executable_fd.native(), 0o777);
_ = bun.C.fchmod(cloned_executable_fd.int(), 0o777);
}
return cloned_executable_fd;
@@ -791,7 +791,7 @@ pub const StandaloneModuleGraph = struct {
.{ .windows_hide_console = windows_hide_console },
target,
);
bun.debugAssert(fd.kind == .system);
fd.assertKind(.system);
if (Environment.isWindows) {
var outfile_buf: bun.OSPathBuffer = undefined;
@@ -803,18 +803,18 @@ pub const StandaloneModuleGraph = struct {
break :brk outfile_buf_u16[0..outfile_w.len :0];
};
bun.windows.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| {
bun.C.moveOpenedFileAtLoose(fd, bun.toFD(root_dir.fd), outfile_slice, true).unwrap() catch |err| {
if (err == error.EISDIR) {
Output.errGeneric("{} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.utf16(outfile_slice)});
} else {
Output.err(err, "failed to move executable to result path", .{});
}
_ = bun.windows.deleteOpenedFile(fd);
_ = bun.C.deleteOpenedFile(fd);
Global.exit(1);
};
fd.close();
_ = bun.sys.close(fd);
if (windows_icon) |icon_utf8| {
var icon_buf: bun.OSPathBuffer = undefined;
@@ -832,11 +832,11 @@ pub const StandaloneModuleGraph = struct {
Global.exit(1);
};
bun.sys.moveFileZWithHandle(
bun.C.moveFileZWithHandle(
fd,
bun.FD.cwd(),
bun.sliceTo(&(try std.posix.toPosixPath(temp_location)), 0),
.fromStdDir(root_dir),
bun.toFD(root_dir.fd),
bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0),
) catch |err| {
if (err == error.IsDir) {
@@ -871,7 +871,7 @@ pub const StandaloneModuleGraph = struct {
// Do not invoke libuv here.
const self_exe = openSelf() catch return null;
defer self_exe.close();
defer _ = Syscall.close(self_exe);
var trailer_bytes: [4096]u8 = undefined;
std.posix.lseek_END(self_exe.cast(), -4096) catch return null;
@@ -1010,7 +1010,7 @@ pub const StandaloneModuleGraph = struct {
switch (Environment.os) {
.linux => {
if (std.fs.openFileAbsoluteZ("/proc/self/exe", .{})) |easymode| {
return .fromStdFile(easymode);
return bun.toFD(easymode.handle);
} else |_| {
if (bun.argv.len > 0) {
// The user doesn't have /proc/ mounted, so now we just guess and hope for the best.
@@ -1021,7 +1021,7 @@ pub const StandaloneModuleGraph = struct {
"",
bun.argv[0],
)) |path| {
return .fromStdFile(try std.fs.cwd().openFileZ(path, .{}));
return bun.toFD((try std.fs.cwd().openFileZ(path, .{})).handle);
}
}
@@ -1033,7 +1033,7 @@ pub const StandaloneModuleGraph = struct {
// opened with no modification.
const self_exe_path = try bun.selfExePath();
const file = try std.fs.openFileAbsoluteZ(self_exe_path.ptr, .{});
return .fromStdFile(file);
return bun.toFD(file.handle);
},
.windows => {
const image_path_unicode_string = std.os.windows.peb().ProcessParameters.ImagePathName;
@@ -1050,7 +1050,7 @@ pub const StandaloneModuleGraph = struct {
}
return bun.sys.openFileAtWindows(
.cwd(),
bun.FileDescriptor.cwd(),
nt_path,
.{
.access_mask = w.SYNCHRONIZE | w.GENERIC_READ,

View File

@@ -6,7 +6,7 @@ const mem = std.mem;
const math = std.math;
const testing = std.testing;
const bun = @import("bun");
const bun = @import("root").bun;
const assert = bun.assert;
pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type {
@@ -57,20 +57,7 @@ pub fn StaticHashMap(comptime K: type, comptime V: type, comptime Context: type,
// get_probe_count: usize = 0,
// del_probe_count: usize = 0,
const impl = HashMapMixin(Self, K, V, Context);
pub const putAssumeCapacity = impl.putAssumeCapacity;
pub const slice = impl.slice;
pub const clearRetainingCapacity = impl.clearRetainingCapacity;
pub const putAssumeCapacityContext = impl.putAssumeCapacityContext;
pub const getOrPutAssumeCapacity = impl.getOrPutAssumeCapacity;
pub const getOrPutAssumeCapacityContext = impl.getOrPutAssumeCapacityContext;
pub const get = impl.get;
pub const getContext = impl.getContext;
pub const has = impl.has;
pub const hasWithHash = impl.hasWithHash;
pub const hasContext = impl.hasContext;
pub const delete = impl.delete;
pub const deleteContext = impl.deleteContext;
pub usingnamespace HashMapMixin(Self, K, V, Context);
};
}
@@ -109,20 +96,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime Context: type, compt
// get_probe_count: usize = 0,
// del_probe_count: usize = 0,
const impl = HashMapMixin(Self, K, V, Context);
pub const putAssumeCapacity = impl.putAssumeCapacity;
pub const slice = impl.slice;
pub const clearRetainingCapacity = impl.clearRetainingCapacity;
pub const putAssumeCapacityContext = impl.putAssumeCapacityContext;
pub const getOrPutAssumeCapacity = impl.getOrPutAssumeCapacity;
pub const getOrPutAssumeCapacityContext = impl.getOrPutAssumeCapacityContext;
pub const get = impl.get;
pub const getContext = impl.getContext;
pub const has = impl.has;
pub const hasWithHash = impl.hasWithHash;
pub const hasContext = impl.hasContext;
pub const delete = impl.delete;
pub const deleteContext = impl.deleteContext;
pub usingnamespace HashMapMixin(Self, K, V, Context);
pub fn initCapacity(gpa: mem.Allocator, capacity: u64) !Self {
assert(math.isPowerOfTwo(capacity));

View File

@@ -111,7 +111,7 @@ pub fn deinit(this: *Watcher, close_descriptors: bool) void {
if (close_descriptors and this.running) {
const fds = this.watchlist.items(.fd);
for (fds) |fd| {
fd.close();
_ = bun.sys.close(fd);
}
}
this.watchlist.deinit(this.allocator);
@@ -236,7 +236,7 @@ fn threadMain(this: *Watcher) !void {
if (this.close_descriptors) {
const fds = this.watchlist.items(.fd);
for (fds) |fd| {
fd.close();
_ = bun.sys.close(fd);
}
}
this.watchlist.deinit(this.allocator);
@@ -271,7 +271,7 @@ pub fn flushEvictions(this: *Watcher) void {
// on mac and linux we can just close the file descriptor
// we don't need to call inotify_rm_watch on linux because it gets removed when the file descriptor is closed
if (fds[item].isValid()) {
fds[item].close();
_ = bun.sys.close(fds[item]);
}
}
last_item = item;
@@ -347,7 +347,7 @@ fn appendFileAssumeCapacity(
event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE;
// id
event.ident = @intCast(fd.native());
event.ident = @intCast(fd.int());
// Store the hash for fast filtering later
event.udata = @as(usize, @intCast(watchlist_id));
@@ -358,7 +358,7 @@ fn appendFileAssumeCapacity(
// - We register the event here.
// our while(true) loop above receives notification of changes to any of the events created here.
_ = std.posix.system.kevent(
this.platform.fd.unwrap().?.native(),
this.platform.fd.cast(),
@as([]KEvent, events[0..1]).ptr,
1,
@as([]KEvent, events[0..1]).ptr,
@@ -399,7 +399,7 @@ fn appendDirectoryAssumeCapacity(
}
const fd = brk: {
if (stored_fd.isValid()) break :brk stored_fd;
if (stored_fd != .zero) break :brk stored_fd;
break :brk switch (bun.sys.openA(file_path, 0, 0)) {
.err => |err| return .{ .err = err },
.result => |fd| fd,
@@ -443,7 +443,7 @@ fn appendDirectoryAssumeCapacity(
event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE;
// id
event.ident = @intCast(fd.native());
event.ident = @intCast(fd.int());
// Store the hash for fast filtering later
event.udata = @as(usize, @intCast(watchlist_id));
@@ -454,7 +454,7 @@ fn appendDirectoryAssumeCapacity(
// - We register the event here.
// our while(true) loop above receives notification of changes to any of the events created here.
_ = std.posix.system.kevent(
this.platform.fd.unwrap().?.native(),
this.platform.fd.cast(),
@as([]KEvent, events[0..1]).ptr,
1,
@as([]KEvent, events[0..1]).ptr,
@@ -505,7 +505,7 @@ pub fn appendFileMaybeLock(
if (autowatch_parent_dir) {
var watchlist_slice = this.watchlist.slice();
if (dir_fd.isValid()) {
if (dir_fd != .zero) {
const fds = watchlist_slice.items(.fd);
if (std.mem.indexOfScalar(bun.FileDescriptor, fds, dir_fd)) |i| {
parent_watch_item = @as(WatchItemIndex, @truncate(i));
@@ -607,7 +607,7 @@ pub fn addFile(
if (this.indexOf(hash)) |index| {
if (comptime FeatureFlags.atomic_file_watcher) {
// On Linux, the file descriptor might be out of date.
if (fd.isValid()) {
if (fd.int() > 0) {
var fds = this.watchlist.items(.fd);
fds[index] = fd;
}
@@ -665,7 +665,7 @@ pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: bun.Sto
}
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -3,7 +3,7 @@ const std = @import("std");
const FeatureFlags = @import("./feature_flags.zig");
const Environment = @import("./env.zig");
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
const bun = @import("bun");
const bun = @import("root").bun;
const OOM = bun.OOM;
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
@@ -28,7 +28,7 @@ pub fn sliceRange(slice: []const u8, buffer: []const u8) ?[2]u32 {
null;
}
pub const IndexType = packed struct(u32) {
pub const IndexType = packed struct {
index: u31,
is_overflow: bool = false,
};

View File

@@ -237,6 +237,6 @@ pub inline fn downcast(a: Allocator) ?*AllocationScope {
const std = @import("std");
const Allocator = std.mem.Allocator;
const bun = @import("bun");
const bun = @import("root").bun;
const Output = bun.Output;
const StoredTrace = bun.crash_handler.StoredTrace;

View File

@@ -1,90 +0,0 @@
const MemoryReportingAllocator = @This();
const log = bun.Output.scoped(.MEM, false);
child_allocator: std.mem.Allocator,
memory_cost: std.atomic.Value(usize) = std.atomic.Value(usize).init(0),
fn alloc(context: *anyopaque, n: usize, alignment: std.mem.Alignment, return_address: usize) ?[*]u8 {
const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context));
const result = this.child_allocator.rawAlloc(n, alignment, return_address) orelse return null;
_ = this.memory_cost.fetchAdd(n, .monotonic);
if (comptime Environment.allow_assert)
log("malloc({d}) = {d}", .{ n, this.memory_cost.raw });
return result;
}
pub fn discard(this: *MemoryReportingAllocator, buf: []const u8) void {
_ = this.memory_cost.fetchSub(buf.len, .monotonic);
if (comptime Environment.allow_assert)
log("discard({d}) = {d}", .{ buf.len, this.memory_cost.raw });
}
fn resize(context: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context));
if (this.child_allocator.rawResize(buf, alignment, new_len, ret_addr)) {
_ = this.memory_cost.fetchAdd(new_len -| buf.len, .monotonic);
if (comptime Environment.allow_assert)
log("resize() = {d}", .{this.memory_cost.raw});
return true;
} else {
return false;
}
}
fn free(context: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context));
this.child_allocator.rawFree(buf, alignment, ret_addr);
if (comptime Environment.allow_assert) {
_ = this.memory_cost.fetchSub(buf.len, .monotonic);
log("free({d}) = {d}", .{ buf.len, this.memory_cost.raw });
}
}
pub fn wrap(this: *MemoryReportingAllocator, allocator_: std.mem.Allocator) std.mem.Allocator {
this.* = .{
.child_allocator = allocator_,
};
return this.allocator();
}
pub fn allocator(this: *MemoryReportingAllocator) std.mem.Allocator {
return std.mem.Allocator{
.ptr = this,
.vtable = &MemoryReportingAllocator.VTable,
};
}
pub fn report(this: *MemoryReportingAllocator, vm: *jsc.VM) void {
const mem = this.memory_cost.load(.monotonic);
if (mem > 0) {
vm.reportExtraMemory(mem);
if (comptime Environment.allow_assert)
log("report({d})", .{mem});
}
}
pub inline fn assert(this: *const MemoryReportingAllocator) void {
if (comptime !Environment.allow_assert) {
return;
}
const memory_cost = this.memory_cost.load(.monotonic);
if (memory_cost > 0) {
Output.panic("MemoryReportingAllocator still has {d} bytes allocated", .{memory_cost});
}
}
pub const VTable = std.mem.Allocator.VTable{
.alloc = &alloc,
.resize = &resize,
.remap = &std.mem.Allocator.noRemap,
.free = &free,
};
const std = @import("std");
const bun = @import("bun");
const jsc = bun.jsc;
const Environment = bun.Environment;
const Output = bun.Output;

View File

@@ -1,6 +1,6 @@
//! A nullable allocator the same size as `std.mem.Allocator`.
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const NullableAllocator = @This();

View File

@@ -1,4 +1,4 @@
const bun = @import("bun");
const bun = @import("root").bun;
const std = @import("std");
/// When cloning large amounts of data potentially multiple times, we can
@@ -23,14 +23,14 @@ pub const LinuxMemFdAllocator = struct {
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;
fd: bun.FileDescriptor = .zero,
ref_count: RefCount,
fd: bun.FileDescriptor = .invalid,
size: usize = 0,
var memfd_counter = std.atomic.Value(usize).init(0);
fn deinit(this: *LinuxMemFdAllocator) void {
this.fd.close();
_ = bun.sys.close(this.fd);
bun.destroy(this);
}
@@ -76,7 +76,7 @@ pub const LinuxMemFdAllocator = struct {
};
};
pub fn alloc(this: *LinuxMemFdAllocator, len: usize, offset: usize, flags: std.posix.MAP) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) {
pub fn alloc(this: *LinuxMemFdAllocator, len: usize, offset: usize, flags: std.posix.MAP) bun.JSC.Maybe(bun.JSC.WebCore.Blob.ByteStore) {
var size = len;
// size rounded up to nearest page
@@ -95,7 +95,7 @@ pub const LinuxMemFdAllocator = struct {
)) {
.result => |slice| {
return .{
.result = bun.webcore.Blob.Store.Bytes{
.result = bun.JSC.WebCore.Blob.ByteStore{
.cap = @truncate(slice.len),
.ptr = slice.ptr,
.len = @truncate(len),
@@ -123,7 +123,7 @@ pub const LinuxMemFdAllocator = struct {
return bytes.len >= 1024 * 1024 * 8;
}
pub fn create(bytes: []const u8) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) {
pub fn create(bytes: []const u8) bun.JSC.Maybe(bun.JSC.WebCore.Blob.ByteStore) {
if (comptime !bun.Environment.isLinux) {
unreachable;
}
@@ -153,13 +153,13 @@ pub const LinuxMemFdAllocator = struct {
}
bun.Output.debugWarn("Failed to write to memfd: {}", .{err});
fd.close();
_ = bun.sys.close(fd);
return .{ .err = err };
},
.result => |result| {
if (result == 0) {
bun.Output.debugWarn("Failed to write to memfd: EOF", .{});
fd.close();
_ = bun.sys.close(fd);
return .{ .err = bun.sys.Error.fromCode(.NOMEM, .write) };
}
written += @intCast(result);

View File

@@ -1,4 +1,4 @@
const bun = @import("bun");
const bun = @import("root").bun;
const std = @import("std");
/// Single allocation only.

View File

@@ -1,7 +1,7 @@
const mem = @import("std").mem;
const builtin = @import("std").builtin;
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const log = bun.Output.scoped(.mimalloc, true);
const assert = bun.assert;
const Allocator = mem.Allocator;

View File

@@ -7,7 +7,7 @@ const Environment = @import("../env.zig");
const FeatureFlags = @import("../feature_flags.zig");
const Allocator = mem.Allocator;
const assert = bun.assert;
const bun = @import("bun");
const bun = @import("root").bun;
const log = bun.Output.scoped(.mimalloc, true);
pub const Arena = struct {

Some files were not shown because too many files have changed in this diff.