Mirror of https://github.com/oven-sh/bun, synced 2026-02-09 10:28:47 +00:00
Sync bun-polyfills branch (#4081)
* bun-polyfills: initial impl. & baseline refactor
* move @types/ws dep from root to /test/
* bun-types: remove ReadableStream.forEach method (this does not exist, probably added by mistake)
* bun-polyfills: remove extraneous stream utils
* bun-polyfills: add types syncing file
* bun-polyfills: re-arrange global polyfills
* bun-polyfills: fix FileBlob streams types again
* bun-polyfills: sync all of @types/node
* bun-polyfills: typeguard all current polyfills
* bun-polyfills: fix import paths
* bun-polyfills: switch to wasm impl. of farmhash
* bun-polyfills: support default import of bun obj
* bun-polyfills: transpiler placeholder file
* bun-polyfills: loaderless import.meta polyfill
* bun-polyfills: refactor import.meta polyfill
* bun-polyfills: repl entrypoint & todo list index
* bun-types: Add null to return type of Bun.which
* bun-types: match Bun.sha with Bun.hash.SHA512_256
* bun-polyfills: new "repl" package.json script
* bun-polyfills: full refactor of toplevel hashes
* bun-polyfills: these are fixed
* bun-types: NODE_ENV is optional
* bun-polyfills: fix Bun.env types
* bun-types+polyfills: fix HeapSnapshot.version type
* bun-polyfills: fix some web streams type conflicts
* bun-polyfills: update internal FileBlob.slice
* bun-polyfills: fix subproc stdin conversions
* bun-polyfills: better internal fileblob types
* bun-polyfills: try to sync global performance type
* bun-polyfills: working zig wasm polyfills setup
* bun-polyfills: update scripts
* bun-polyfills: fix wasm file location resolution
* bun-polyfills: goodbye farmhash (replaced by zig)
* bun-polyfills: move all Bun.hash polyfills to zig
* bun-polyfills: reimpl. seeding of seeded hashes
* bun-polyfills: impl. undocumented murmur32v2
* bun-polyfills: switch zighash from jsdoc to .d.ts
* bun-types: partial fix of Hash types
* bun-polyfills: documented Hash.murmur32v2
* bun-polyfills: misc updates
* bun-polyfills: enable sourcemaps
* bun-polyfills: handle empty inputs to hash funcs
* bun-types: narrow down hash func types
* bun-polyfills: remove unnecessary bigint casts
* bun-polyfills: impl. Bun.isMainThread
* bun-polyfills: impl. Bun.sleep and fix sleepSync
* bun-polyfills: impl. indexOfLine
* bun-polyfills: impl. Bun.peek.status
* bun-types: fix hashing test

---------

Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
packages/bun-polyfills/.gitignore (vendored, new file, 172 lines)
@@ -0,0 +1,172 @@
```gitignore
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional stylelint cache
.stylelintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/

# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# vuepress v2.x temp and cache directory
.temp
.cache

# Docusaurus cache and generated files
.docusaurus

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# Misc
_*
.old
.vscode
!build
```
packages/bun-polyfills/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
```md
# Bun APIs Polyfills

Polyfills for Bun's JavaScript runtime APIs, for use in environments outside of Bun such as Node.js or the browser¹.

¹ **Note:** The current priority is Node.js; browser support will vary per polyfill.

## Usage

This is currently a work in progress and is not ready for general use.
```
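In the meantime, a hypothetical sketch of the intended usage, inferred from `package.json` (`"module": "src/index.ts"`) and the entry point's exports; this is not a documented API yet:

```ts
// Importing the entry point applies the global polyfills as a side effect
// and exposes the Bun namespace polyfills as module exports.
import Bun from 'bun-polyfills';

console.log(Bun.version);             // e.g. '0.7.1'
console.log(Bun.hash('hello world')); // wyhash, via the bundled zighash WASM module
```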
BIN packages/bun-polyfills/bun.lockb (executable, binary file not shown)
packages/bun-polyfills/lib/zighash/index.mjs (new file, 95 lines)
@@ -0,0 +1,95 @@
```js
// @ts-check
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const { instance } = /** @type {ZighashInstance} */(
    await WebAssembly.instantiate(
        fs.readFileSync(path.join(path.dirname(fileURLToPath(import.meta.url)), 'zighash.wasm')),
        {
            env: {
                /** @param {any} x */
                print(x) { console.log(x); },
            },
        }
    )
);
const exports = instance.exports;
const mem = exports.memory;
const memview = {
    get u8() { return new Uint8Array(mem.buffer); },
    get u16() { return new Uint16Array(mem.buffer); },
    get u32() { return new Uint32Array(mem.buffer); },
    get u64() { return new BigUint64Array(mem.buffer); },
    get i8() { return new Int8Array(mem.buffer); },
    get i16() { return new Int16Array(mem.buffer); },
    get i32() { return new Int32Array(mem.buffer); },
    get i64() { return new BigInt64Array(mem.buffer); },
    get f32() { return new Float32Array(mem.buffer); },
    get f64() { return new Float64Array(mem.buffer); },
};

const nullptr = { ptr: -1, size: 0 };
const encoder = new TextEncoder();

const allocBuffer = (
    /** @type {ArrayBufferView | ArrayBuffer | SharedArrayBuffer} */ buf,
    /** @type {boolean=} */ nullTerminate = false,
) => {
    const size = buf.byteLength + +nullTerminate;
    if (size === 0) return nullptr;
    const ptr = exports.alloc(size);
    if (ptr === -1) throw new Error('WASM memory allocation failed');
    const u8heap = memview.u8;
    u8heap.set(new Uint8Array(ArrayBuffer.isView(buf) ? buf.buffer : buf), ptr);
    if (nullTerminate) u8heap[ptr + buf.byteLength] = 0;
    return { ptr, size };
};
const allocString = (
    /** @type {string} */ str,
    /** @type {boolean=} */ nullTerminate = true,
) => {
    const strbuf = encoder.encode(str);
    return allocBuffer(strbuf, nullTerminate);
};

/** @type {JSSeededHash64Function} */
export function wyhash(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.wyhash(ptr, size, seed));
}
/** @type {JSHash32Function} */
export function adler32(input = '') {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.adler32(ptr, size) >>> 0;
}
/** @type {JSHash32Function} */
export function crc32(input = '') {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.crc32(ptr, size) >>> 0;
}
/** @type {JSHash32Function} */
export function cityhash32(input = '') {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.cityhash32(ptr, size) >>> 0;
}
/** @type {JSSeededHash64Function} */
export function cityhash64(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.cityhash64(ptr, size, seed));
}
/** @type {JSSeededHash32Function} */
export function murmur32v3(input = '', seed = 0) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.murmur32v3(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
}
/** @type {JSSeededHash32Function} */
export function murmur32v2(input = '', seed = 0) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.murmur32v2(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
}
/** @type {JSSeededHash64Function} */
export function murmur64v2(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.murmur64v2(ptr, size, seed));
}
```
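For reference, a minimal usage sketch of the exports above (the seed parameters default to `0`/`0n` as defined in each function):

```ts
import { wyhash, crc32, murmur64v2 } from './index.mjs';

console.log(wyhash('hello'));                  // 64-bit hash as an unsigned BigInt
console.log(crc32(new Uint8Array([1, 2, 3]))); // 32-bit hash as an unsigned number
console.log(murmur64v2('hello', 42n));         // explicit 64-bit seed
```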
packages/bun-polyfills/lib/zighash/package.json (new file, 10 lines)
@@ -0,0 +1,10 @@
```json
{
    "private": true,
    "type": "module",
    "name": "zighash-wasm",
    "module": "index.mjs",
    "scripts": {
        "build": "bun run clean && zig build-lib src/main.zig --name zighash -target wasm32-freestanding -dynamic -rdynamic -OReleaseSmall",
        "clean": "rm -f *.wasm *.o"
    }
}
```
packages/bun-polyfills/lib/zighash/src/main.zig (new file, 58 lines)
@@ -0,0 +1,58 @@
```zig
const std = @import("std");

extern fn print(*const u8) void;

comptime {
    std.debug.assert(@alignOf(u16) >= 2);
    std.debug.assert(@alignOf(u32) >= 4);
    std.debug.assert(@alignOf(u64) >= 8);
    std.debug.assert(@alignOf(i16) >= 2);
    std.debug.assert(@alignOf(i32) >= 4);
    std.debug.assert(@alignOf(i64) >= 8);
}

// JS allocates input buffers through this export and copies the bytes into WASM
// memory; each hash export below then frees the slice once it has been hashed.
export fn alloc(size: u32) [*]const u8 {
    const slice = std.heap.wasm_allocator.alloc(u8, size) catch @panic("wasm failed to allocate memory");
    return slice.ptr;
}

export fn wyhash(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Wyhash.hash(seed, input);
}
export fn adler32(input_ptr: [*]const u8, input_size: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Adler32.hash(input);
}
export fn crc32(input_ptr: [*]const u8, input_size: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Crc32.hash(input);
}
export fn cityhash32(input_ptr: [*]const u8, input_size: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.CityHash32.hash(input);
}
export fn cityhash64(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.CityHash64.hashWithSeed(input, seed);
}
export fn murmur32v3(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Murmur3_32.hashWithSeed(input, seed);
}
export fn murmur32v2(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Murmur2_32.hashWithSeed(input, seed);
}
export fn murmur64v2(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Murmur2_64.hashWithSeed(input, seed);
}
```
packages/bun-polyfills/lib/zighash/types.d.ts (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
```ts
type WasmHash32Function = (input_ptr: number, input_size: number) => number;
type WasmHash64Function = (input_ptr: number, input_size: number) => bigint;
type WasmSeededHash32Function = (input_ptr: number, input_size: number, seed: number) => number;
type WasmSeededHash64Function = (input_ptr: number, input_size: number, seed: bigint) => bigint;
type JSHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
type JSHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => bigint;
type JSSeededHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
type JSSeededHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;

type ZighashInstance = WebAssembly.WebAssemblyInstantiatedSource & {
    instance: {
        exports: {
            memory: WebAssembly.Memory,
            alloc(size: number): number,
            wyhash: WasmSeededHash64Function,
            adler32: WasmHash32Function,
            crc32: WasmHash32Function,
            cityhash32: WasmHash32Function,
            cityhash64: WasmSeededHash64Function,
            murmur32v3: WasmSeededHash32Function,
            murmur32v2: WasmSeededHash32Function,
            murmur64v2: WasmSeededHash64Function,
        };
    };
}
```
BIN packages/bun-polyfills/lib/zighash/zighash.wasm (executable, binary file not shown)
packages/bun-polyfills/package.json (new file, 27 lines)
@@ -0,0 +1,27 @@
```json
{
    "type": "module",
    "name": "bun-polyfills",
    "module": "src/index.ts",
    "devDependencies": {
        "@types/node": "^20.4.5",
        "@types/which": "^3.0.0",
        "bun-types": "^0.7.0",
        "copyfiles": "^2.4.1"
    },
    "peerDependencies": {
        "typescript": "^5.0.0"
    },
    "scripts": {
        "node": "node --enable-source-maps --import ./dist/src/repl.js",
        "clean": "rm -rf dist",
        "build": "bun run clean && bunx tsc && bunx copyfiles \"./**/*.wasm\" dist",
        "build/wasm": "bun run build/zighash",
        "build/zighash": "cd lib/zighash && bun run build && cd ../.."
    },
    "dependencies": {
        "js-md4": "^0.3.2",
        "open-editor": "^4.0.0",
        "supports-color": "^9.4.0",
        "which": "^3.0.1"
    }
}
```
packages/bun-polyfills/src/global/console.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
```ts
//? Implements: Red colored console.error from Bun
//if (Bun.enableANSIColors) {
//    const RED = '\x1B[31m' as const;
//    const RESET = '\x1B[0m' as const;
//    const consoleError = console.error;
//    console.error = (...args) => {
//        if (typeof args[0] === 'string') args[0] = RED + args[0];
//        consoleError(...args, RESET);
//    };
//}

//? Implements: for await (const line of console) { ... }
console[Symbol.asyncIterator] = async function* () {
    while (true) yield await new Promise(resolve => {
        // use once() rather than on() so each resolved iteration removes its stdin listener
        process.stdin.once('data', (data: Buffer | string) => {
            const str = data.toString('utf-8').replaceAll(/[\r\n]+/g, '');
            resolve(str);
        });
    });
} satisfies Console[typeof Symbol.asyncIterator];

//? Implements: Bun-exclusive console function
console.write = ((...data) => {
    const str = data.map(val => {
        if (val instanceof ArrayBuffer) val = new TextDecoder('utf-8').decode(val);
        else if (typeof val === 'object') val = new TextDecoder('utf-8').decode(val.buffer);
        return val;
    }).join('');
    process.stdout.write(str);
    return new TextEncoder().encode(str).byteLength; // TextEncoder takes no constructor arguments (it is always UTF-8)
}) satisfies Console['write'];
```
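A usage sketch of the two polyfills above, run under Node with this module already imported (the async iterator yields one newline-stripped string per stdin chunk):

```ts
import './global/console.js';

for await (const line of console) {       // line-by-line stdin, Bun-style
    console.write('echo: ', line, '\n');  // returns the number of UTF-8 bytes written
    if (line === 'exit') break;
}
```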
packages/bun-polyfills/src/global/importmeta.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
```ts
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { createRequire } from 'node:module';

// Without an ESM loader, this polyfill is impossible to apply automatically,
// due to the per-module nature of import.meta. In order to use this polyfill,
// you must import it in every module that uses import.meta, and call it with
// the import.meta object as the argument. When the polyfills are integrated
// with bun build, this could be done automatically by the build process at
// the top of every module file bundled.

export default function polyfillImportMeta(metaIn: ImportMeta) {
    const require2 = createRequire(metaIn.url);
    const metapath = fileURLToPath(metaIn.url);
    const meta: ImportMeta = {
        url: metaIn.url,
        main: metapath === process.argv[1],
        path: metapath,
        dir: path.dirname(metapath),
        file: path.basename(metapath),
        require: require2,
        async resolve(id: string, parent?: string) {
            return this.resolveSync(id, parent);
        },
        resolveSync(id: string, parent?: string) {
            return require2.resolve(id, {
                paths: typeof parent === 'string' ? [
                    path.resolve(parent.startsWith('file://') ? fileURLToPath(parent) : parent, '..')
                ] : undefined,
            });
        },
    };
    Object.assign(metaIn, meta);
}
```
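As the comment above spells out, every module has to opt in explicitly; a sketch of what that looks like at the top of a consuming module:

```ts
import polyfillImportMeta from './global/importmeta.js';
polyfillImportMeta(import.meta); // must run in every module that uses import.meta

console.log(import.meta.dir);  // Bun-style directory of the current module
console.log(import.meta.main); // true when this module is the process entry point
```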
packages/bun-polyfills/src/global/index.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
```ts
import { version } from '../modules/bun.js';
import './console.js';
import './process.js';
import os from 'node:os';

//? NodeJS Blob doesn't implement Blob.json(), so we need to polyfill it.
Blob.prototype.json = async function json<T>(this: Blob): Promise<T> {
    try {
        return JSON.parse(await this.text()) as T;
    } catch (err) {
        Error.captureStackTrace(err as Error, json);
        throw err;
    }
};

//? navigator global object polyfill
Reflect.set(globalThis, 'navigator', {
    userAgent: `Bun/${version}`,
    hardwareConcurrency: os.cpus().length,
});

//? method only available in Bun
// this isn't quite accurate, but it shouldn't break anything and is currently here just for matching bun and node types
const ReadableStreamDefaultReaderPrototype = Object.getPrototypeOf(new ReadableStream().getReader());
Reflect.set(
    ReadableStreamDefaultReaderPrototype, 'readMany',
    function readMany(this: ReadableStreamDefaultReader): Promise<ReadableStreamDefaultReadManyResult<any>> {
        return new Promise((resolve, reject) => {
            const result: ReadableStreamDefaultReadManyResult<any> = {
                value: [],
                size: 0,
                done: true
            };
            this.read().then(({ done, value }) => {
                if (done) resolve(result);
                else {
                    result.value.push(value);
                    result.size = value.length;
                    result.done = false;
                    resolve(result);
                }
            }, reject);
        });
    }
);
```
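A sketch of the `readMany` shim above; note that unlike Bun's native `readMany`, this version resolves with at most one chunk per call (its `value` array never holds more than one entry):

```ts
import './global/index.js';

const reader = new Blob(['hello']).stream().getReader();
// readMany is patched onto the reader prototype, but not typed on the DOM lib
const { value, size, done } = await (reader as any).readMany();
console.log(value, size, done); // [ Uint8Array(5) ] 5 false
```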
packages/bun-polyfills/src/global/process.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
```ts
if (typeof process === 'object' && process !== null) {
    // process polyfills (node-only)
    Reflect.set(process, 'isBun', 1 satisfies Process['isBun']);
    Reflect.set(process, 'browser', false satisfies Process['browser']);

    const NULL_VERSION = '0'.repeat(39) + '1';
    process.versions.bun = '0.7.1' satisfies Process['versions'][string]; // TODO: This can probably be fetched from somewhere in the repo
    process.versions.webkit = NULL_VERSION satisfies Process['versions'][string];
    process.versions.mimalloc = NULL_VERSION satisfies Process['versions'][string];
    process.versions.libarchive = NULL_VERSION satisfies Process['versions'][string];
    process.versions.picohttpparser = NULL_VERSION satisfies Process['versions'][string];
    process.versions.boringssl = NULL_VERSION satisfies Process['versions'][string];
    process.versions.zig = '0.10.0' satisfies Process['versions'][string];
    Reflect.set(process, 'revision', NULL_VERSION satisfies Process['revision']);

    // Doesn't work on Windows sadly
    //Object.defineProperty(process, 'execPath', { value: path.resolve(root, 'cli.js') });
}
```
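With the shims above applied, Bun-detection code keeps working under Node; a small sketch:

```ts
import './global/process.js';

// process.isBun is not part of @types/node, so read it reflectively
if (Reflect.get(process, 'isBun')) {
    console.log(`emulating Bun ${process.versions.bun} (zig ${process.versions.zig})`);
}
```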
packages/bun-polyfills/src/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
```ts
export * from './modules/bun.js';
export * as default from './modules/bun.js';
import './global/index.js';
```
packages/bun-polyfills/src/modules/bun.ts (new file, 489 lines)
@@ -0,0 +1,489 @@
```ts
import type {
    BunPlugin, PluginConstraints, PluginBuilder, OnLoadCallback, OnResolveCallback, HeapSnapshot,
    EditorOptions, SpawnOptions, Subprocess, SyncSubprocess, FileBlob as BunFileBlob, ArrayBufferView, Hash
} from 'bun';
import { TextDecoderStream } from 'node:stream/web';
import { NotImplementedError, type SystemError } from '../utils/errors.js';
import { streamToBuffer, isArrayBufferView, isFileBlob, isOptions } from '../utils/misc.js';
import dnsPolyfill from './bun/dns.js';
import { FileSink } from './bun/filesink.js';
import {
    bunHash, bunHashProto,
    MD4 as MD4Polyfill, MD5 as MD5Polyfill,
    SHA1 as SHA1Polyfill, SHA224 as SHA224Polyfill,
    SHA256 as SHA256Polyfill, SHA384 as SHA384Polyfill,
    SHA512 as SHA512Polyfill, SHA512_256 as SHA512_256Polyfill
} from './bun/hashes.js';
import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js';
import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';
import fs from 'node:fs';
import v8 from 'node:v8';
import path from 'node:path';
import util from 'node:util';
import zlib from 'node:zlib';
import streams from 'node:stream';
import workers from 'node:worker_threads';
import chp, { type ChildProcess, type StdioOptions, type SpawnSyncReturns } from 'node:child_process';
import { fileURLToPath as fileURLToPathNode, pathToFileURL as pathToFileURLNode } from 'node:url';
import npm_which from 'which';
import openEditor from 'open-editor';

export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satisfies typeof Bun.main;

export const version = '0.7.1' satisfies typeof Bun.version; // TODO: This can probably be fetched from somewhere in the repo
export const revision = '0'.repeat(39) + '1' satisfies typeof Bun.revision;
//getter(bun, 'cwd', proc.cwd); //! Can't named export a getter
export const origin = '' satisfies typeof Bun.origin;
// @ts-expect-error ---
export const stdin = new NodeJSStreamFileBlob(process.stdin) satisfies typeof Bun.stdin;
// @ts-expect-error ---
export const stdout = new NodeJSStreamFileBlob(process.stdout) satisfies typeof Bun.stdout;
// @ts-expect-error ---
export const stderr = new NodeJSStreamFileBlob(process.stderr) satisfies typeof Bun.stderr;
export const argv = [process.argv0, ...process.execArgv, ...process.argv.slice(1)] satisfies typeof Bun.argv;
export const env = process.env satisfies typeof Bun.env;
Object.setPrototypeOf(env, {
    toJSON(this: typeof env) { return { ...this }; }
});
// @ts-expect-error supports-color types are unbelievably bad
export const enableANSIColors = (await import('supports-color')).createSupportsColor().hasBasic satisfies typeof Bun.enableANSIColors;

export const hash = bunHash satisfies typeof Bun.hash;
Object.setPrototypeOf(hash, bunHashProto satisfies Hash);

export const unsafe = {
    gcAggressionLevel: () => 0, //! no-op
    arrayBufferToString: (buf) => new TextDecoder().decode(buf),
    segfault: () => {
        const segfault = new Error();
        segfault.name = 'SegfaultTest';
        segfault.message = '';
        console.error(segfault);
        process.exit(1);
    }
} satisfies typeof Bun['unsafe'];

export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1;
export const MD5 = MD5Polyfill satisfies typeof Bun.MD5;
export const MD4 = MD4Polyfill satisfies typeof Bun.MD4;
export const SHA224 = SHA224Polyfill satisfies typeof Bun.SHA224;
export const SHA512 = SHA512Polyfill satisfies typeof Bun.SHA512;
export const SHA384 = SHA384Polyfill satisfies typeof Bun.SHA384;
export const SHA256 = SHA256Polyfill satisfies typeof Bun.SHA256;
export const SHA512_256 = SHA512_256Polyfill satisfies typeof Bun.SHA512_256;

export const indexOfLine = ((data, offset) => {
    if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) data = new Uint8Array(data);
    if (data instanceof DataView || !(data instanceof Uint8Array)) data = new Uint8Array(data.buffer);
    return data.indexOf(10, offset);
}) satisfies typeof Bun.indexOfLine;

const peek_ = function peek(promise: Parameters<typeof Bun.peek>[0]) {
    throw new NotImplementedError('Bun.peek', peek);
};
peek_.status = (promise => {
    return util.inspect(promise).includes('<pending>') ? 'pending'
        : util.inspect(promise).includes('<rejected>') ? 'rejected' : 'fulfilled';
}) satisfies typeof Bun.peek.status;
export const peek = peek_ satisfies typeof Bun.peek;

export const sleep = (ms => {
    return new Promise(r => setTimeout(r, ms instanceof Date ? ms.valueOf() - Date.now() : ms));
}) satisfies typeof Bun.sleep;
export const sleepSync = (ms => {
    if (ms < 0) throw new TypeError('argument to sleepSync must not be negative');
    Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
}) satisfies typeof Bun.sleepSync;

//? This is not 1:1 matching, but no one should be relying on the exact output of this function anyway.
//? To quote Node's inspect itself: "The output of util.inspect() may change at any time and should not be depended upon programmatically."
//? Of course in Node's case some didn't listen and relied on the output of util.inspect() anyway, but hopefully this won't happen with this one.
export const inspect = ((arg: any): string => util.inspect(arg, {
    breakLength: Infinity,
    colors: false,
    compact: true,
    customInspect: false,
    depth: Infinity,
    getters: true,
    maxArrayLength: Infinity,
    maxStringLength: Infinity,
    showHidden: false,
    showProxy: false,
    sorted: false
})) satisfies typeof Bun.inspect;

export const resolveSync = ((id: string, parent: string) => import.meta.resolveSync(id, parent)) satisfies typeof Bun.resolveSync;
export const resolve = (async (id: string, parent: string) => import.meta.resolve!(id, parent)) satisfies typeof Bun.resolve;

//? Yes, this is faster than new Uint8Array(Buffer.allocUnsafe(size).buffer) by about 2.5x in Node.js
export const allocUnsafe = ((size: number) => new Uint8Array(size)) satisfies typeof Bun.allocUnsafe;

export const generateHeapSnapshot = (async (): Promise<HeapSnapshot> => {
    process.emitWarning('The polyfill for Bun.generateHeapSnapshot is asynchronous, unlike the original which is synchronous.', {
        type: 'BunPolyfillWarning',
        code: 'BUN_POLYFILLS_ASYNC_GENERATE_HEAP_SNAPSHOT',
        detail: 'This is due to v8.getHeapSnapshot() returning a stream in Node.js. This is not a bug, but a limitation of the polyfill.'
    });
    const raw = (await streamToBuffer(v8.getHeapSnapshot())).toString('utf8');
    const json = JSON.parse(raw) as V8HeapSnapshot;
    return {
        version: 2,
        type: 'Inspector',
        nodes: json.nodes,
        edges: json.edges,
        edgeTypes: json.snapshot.meta.edge_types.flat(),
        edgeNames: json.snapshot.meta.edge_fields.flat(),
        nodeClassNames: json.snapshot.meta.node_types.flat(),
    };
    // @ts-expect-error Refer to the above emitWarning call
}) satisfies typeof Bun.generateHeapSnapshot;

//! This is a no-op in Node.js, as there is no way to shrink the V8 heap from JS as far as I know.
export const shrink = (() => void 0) satisfies typeof Bun.shrink;

export const openInEditor = ((file: string, opts?: EditorOptions) => {
    const target = [{ file: path.resolve(process.cwd(), file), line: opts?.line, column: opts?.column }] as const;
    if (opts?.editor) openEditor(target, opts);
    else openEditor(target, { editor: process.env.TERM_PROGRAM ?? process.env.VISUAL ?? process.env.EDITOR ?? 'vscode' });
}) satisfies typeof Bun.openInEditor;

export const serve = (() => { throw new NotImplementedError('Bun.serve', serve); }) satisfies typeof Bun.serve;

export const file = ((path: string | URL | Uint8Array | ArrayBufferLike | number, options?: BlobPropertyBag): BunFileBlob => {
    if (typeof path === 'object') throw new NotImplementedError('Bun.file with typed array', file);
    return new FileBlob(path, options);
}) satisfies typeof Bun.file;

export const write = (async (dest: BunFileBlob | PathLike, input: string | Blob | TypedArray | ArrayBufferLike | BlobPart[] | Response | BunFileBlob): ReturnType<typeof Bun.write> => {
    if (!isFileBlob(dest)) {
        let fd: number;
        if (dest instanceof ArrayBuffer || dest instanceof SharedArrayBuffer) fd = fs.openSync(Buffer.from(dest), 'w');
        // bun-types thought it'd be funny to make their own URL definition which doesn't match the correct URL definition...
        else if (typeof dest === 'string' || dest instanceof URL) fd = fs.openSync(dest as import('url').URL, 'w');
        else fd = fs.openSync(Buffer.from(dest.buffer), 'w');

        if (input instanceof Response || input instanceof Blob) {
            const data = await input.text();
            return new Promise((resolve, reject) => {
                fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
            });
        }
        if (Array.isArray(input)) {
            const data = await new Blob(input).text();
            return new Promise((resolve, reject) => {
                fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
            });
        }
        return new Promise((resolve, reject) => {
            if (typeof input === 'string') return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
            if (input instanceof Uint8Array) return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
            if (input instanceof ArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
            if (input instanceof SharedArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
            // if all else fails, it seems Bun tries to convert to string and write that.
            // (resolve with the recursive call's promise so the outer promise actually settles)
            return resolve(write(dest, String(input)));
        });
    } else {
        const writer = dest.writer();
        if (Array.isArray(input)) input = new Blob(input);
        if (input instanceof Blob || input instanceof Response) return writer.write(await input.arrayBuffer());
        if (input instanceof ArrayBuffer || input instanceof SharedArrayBuffer || ArrayBuffer.isView(input)) return writer.write(input);
        if (typeof input === 'string') return writer.write(input);
        else return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
    }
}) satisfies typeof Bun.write;

export const sha = SHA512_256.hash satisfies typeof Bun.sha;

export const nanoseconds = (() => Math.trunc(performance.now() * 1000000)) satisfies typeof Bun.nanoseconds;

//? This just prints out some debug stuff in console, and as the name implies no one should be using it.
//? But, just in case someone does, we'll make it a no-op function so at least the program doesn't crash trying to run the function.
export const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = (() => {
    console.warn('DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump called.');
}) satisfies unknown; /* undocumented */

export const gzipSync = zlib.gzipSync satisfies typeof Bun.gzipSync;
export const deflateSync = zlib.deflateSync satisfies typeof Bun.deflateSync;
export const gunzipSync = zlib.gunzipSync satisfies typeof Bun.gunzipSync;
export const inflateSync = zlib.inflateSync satisfies typeof Bun.inflateSync;

export const which = ((cmd: string, options) => {
    const opts: npm_which.Options = { all: false, nothrow: true };
    if (options?.PATH) opts.path = options.PATH;
    const result = npm_which.sync(cmd, opts) as string | null;
    if (!result || !options?.cwd) return result;
    if (path.normalize(result).includes(path.normalize(options.cwd))) return result;
    else return null;
}) satisfies typeof Bun.which;

export const spawn = ((...args) => {
    let cmd: string;
    let argv: string[];
    let opts: SpawnOptions.OptionsObject;

    if (args[0] instanceof Array) {
        cmd = args[0][0];
        argv = args[0].slice(1);
        opts = isOptions(args[1]) ? args[1] : {};
    } else {
        cmd = args[0].cmd[0];
        argv = args[0].cmd.slice(1);
        opts = args[0];
        Reflect.deleteProperty(opts, 'cmd');
    }

    let stdio: StdioOptions = [];
    opts.stdio ??= [undefined, undefined, undefined];
    if (opts.stdin) opts.stdio[0] = opts.stdin;
    if (opts.stdout) opts.stdio[1] = opts.stdout;
    if (opts.stderr) opts.stdio[2] = opts.stderr;
    for (let i = 1; i < 3; i++) { // this intentionally skips stdin
        let std = opts.stdio[i];
        if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
        else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
        else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
        else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
        else stdio[i] = std;
    }
    let stdinSrc: typeof opts.stdio[0] = null;
    if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
        stdinSrc = opts.stdio[0];
        stdio[0] = 'pipe';
    }

    const subp = chp.spawn(cmd, argv, {
        cwd: opts.cwd ?? process.cwd(),
        // why is this set to (string | number) on env values...
        env: { ...(opts.env as Record<string, string> ?? process.env) },
        stdio
    }) as unknown as Subprocess;
    const subpAsNode = subp as unknown as ChildProcess;
    const stdstreams = [subpAsNode.stdin, subpAsNode.stdout, subpAsNode.stderr] as const;
    if (subpAsNode.stdout) {
        const rstream = streams.Readable.toWeb(subpAsNode.stdout) as ReadableStream;
        Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
            void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails it's already closed */ });
            return this;
        });
        (<Mutable<Subprocess>>subp).stdout = rstream;
    }
    if (subpAsNode.stderr) {
        const rstream = streams.Readable.toWeb(subpAsNode.stderr) as ReadableStream;
        Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
            void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails it's already closed */ });
            return this;
        });
        (<Mutable<Subprocess>>subp).stderr = rstream;
    }
    let internalStdinStream: streams.Writable;
    if (subpAsNode.stdin) {
        const wstream = subpAsNode.stdin;
        Reflect.set(wstream, 'destroy', function (this: NodeJS.WritableStream, err?: Error) {
            void this.end(); /* if it fails it's already closed */
            return this;
        });
        internalStdinStream = wstream;
        (<Mutable<Subprocess>>subp).stdin = new FileSink(wstream);
    }
    Object.defineProperty(subp, 'readable', { get(this: Subprocess) { return this.stdout; } });
    Object.defineProperty(subp, 'exited', {
        value: new Promise((resolve, reject) => {
            subpAsNode.once('exit', (code) => {
                stdstreams[0]?.destroy();
                stdstreams[1]?.destroy();
                stdstreams[2]?.destroy();
                subp.kill();
                subp.unref();
                subpAsNode.disconnect?.();
                subpAsNode.removeAllListeners();
                resolve(code);
            });
        })
    });
    if (stdinSrc) subpAsNode.once('spawn', () => {
        const stdinWeb = streams.Writable.toWeb(internalStdinStream);
        if (isArrayBufferView(stdinSrc)) stdinSrc = new Blob([stdinSrc]);
        if (stdinSrc instanceof Blob) void stdinSrc.stream().pipeTo(stdinWeb);
        else if (stdinSrc instanceof Response || stdinSrc instanceof Request) void stdinSrc.body!.pipeTo(stdinWeb);
        else if (typeof stdinSrc === 'number') void fs.createReadStream('', { fd: stdinSrc }).pipe(internalStdinStream);
        else void stdinSrc;
    });
    // change the error stack to point to the spawn() call instead of internal Node.js callback stuff
    const here = new Error('§__PLACEHOLDER__§');
    Error.captureStackTrace(here, spawn);
    if (!subpAsNode.pid) return subpAsNode.once('error', (err: SystemError) => {
        err.message = (err.syscall ?? `spawn ${err.path ?? ''}`) + ' ' + (err.code ?? String(err.errno ?? ''));
        err.stack = here.stack!.replace('§__PLACEHOLDER__§', err.message);
        throw err;
    }) as unknown as Subprocess;
    return subp;
}) satisfies typeof Bun.spawn;
export const spawnSync = ((...args): SyncSubprocess => {
    let cmd: string;
    let argv: string[];
    let opts: SpawnOptions.OptionsObject;
    if (args[0] instanceof Array) {
        cmd = args[0][0];
        argv = args[0].slice(1);
        opts = isOptions(args[1]) ? args[1] : {};
    } else {
        cmd = args[0].cmd[0];
        argv = args[0].cmd.slice(1);
        opts = args[0];
        Reflect.deleteProperty(opts, 'cmd');
    }

    let stdio: StdioOptions = [];
    opts.stdio ??= [undefined, undefined, undefined];
    if (opts.stdin) opts.stdio[0] = opts.stdin;
    if (opts.stdout) opts.stdio[1] = opts.stdout;
    if (opts.stderr) opts.stdio[2] = opts.stderr;
    for (let i = 1; i < 3; i++) { // this intentionally skips stdin
        let std = opts.stdio[i];
        if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
        else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
        else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
        else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
        else stdio[i] = std;
    }
    let input: ArrayBufferView | string | undefined;
    if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
        stdio[0] = null; // will be overridden by chp.spawnSync "input" option
        //! Due to the fully async nature of Blobs, Responses and Requests,
        //! we can't synchronously get the data out of them here in userland.
        if (opts.stdio[0] instanceof Blob) throw new NotImplementedError('Bun.spawnSync({ stdin: <Blob> })', spawnSync);
        else if (opts.stdio[0] instanceof Response || opts.stdio[0] instanceof Request) throw new NotImplementedError('Bun.spawnSync({ stdin: <Response|Request> })', spawnSync);
        else if (typeof opts.stdio[0] === 'number') input = fs.readFileSync(opts.stdio[0]);
        else input = opts.stdio[0] as ArrayBufferView;
    }

    const subp = chp.spawnSync(cmd, argv, {
        cwd: opts.cwd ?? process.cwd(),
        env: { ...(opts.env as Record<string, string> ?? process.env) },
        stdio, input
    }) as unknown as SyncSubprocess;
    const subpAsNode = subp as unknown as SpawnSyncReturns<Buffer>;
    if (subpAsNode.error) throw subpAsNode.error;

    subp.exitCode = subpAsNode.status ?? NaN; //! not sure what Bun would return here (child killed by signal)
    subp.success = subp.exitCode === 0;
    return subp;
}) satisfies typeof Bun.spawnSync;

export const escapeHTML = ((input) => {
    const str = String(input);
    let out = '';
    for (let i = 0; i < str.length; i++) {
        const char = str[i];
        switch (char) {
            case '"': out += '&quot;'; break;
            case "'": out += '&#x27;'; break;
            case '&': out += '&amp;'; break;
            case '<': out += '&lt;'; break;
            case '>': out += '&gt;'; break;
            default: out += char;
        }
    }
    return out;
}) satisfies typeof Bun.escapeHTML;

export const readableStreamToArrayBuffer = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): ArrayBuffer | Promise<ArrayBuffer> => {
    return (async () => {
        const sink = new ArrayBufferSink();
        const reader = stream.getReader();
        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            sink.write(value);
        }
        return sink.end() as ArrayBuffer;
    })();
}) satisfies typeof Bun.readableStreamToArrayBuffer;
export const readableStreamToText = (async (stream: ReadableStream<ArrayBufferView | ArrayBuffer>) => {
    let result = '';
    const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
    while (true) {
        const { done, value } = await reader.read();
        //! for some reason "done" isn't being set to true so this is just infinitely looping at the moment... sigh
        if (done || !value || !value?.length) break;
        result += value;
    }
    return result;
}) satisfies typeof Bun.readableStreamToText;
export const readableStreamToBlob = (async (stream: ReadableStream<any>) => {
    const parts = await readableStreamToArray(stream);
    return new Blob(parts as BlobPart[]);
}) satisfies typeof Bun.readableStreamToBlob;
export const readableStreamToArray = (async <T = unknown>(stream: ReadableStream<T>) => {
    const array = new Array<T>();
    const reader = stream.getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done || !value || !(<any>value)?.length) break;
        array.push(value as unknown as T);
    }
    return array;
}) satisfies typeof Bun.readableStreamToArray;
export const readableStreamToJSON = (async <T = unknown>(stream: ReadableStream<Uint8Array>) => {
    const text = await readableStreamToText(stream);
    try {
        return JSON.parse(text) as T;
    } catch (err) {
        Error.captureStackTrace(err as Error, readableStreamToJSON);
        throw err;
    }
}) satisfies typeof Bun.readableStreamToJSON;

export const concatArrayBuffers = ((buffers) => {
    let size = 0;
    for (const chunk of buffers) size += chunk.byteLength;
    const buffer = new ArrayBuffer(size);
    const view = new Uint8Array(buffer);
    let offset = 0;
    for (const chunk of buffers) {
        view.set(new Uint8Array(chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer ? chunk : chunk.buffer), offset);
        offset += chunk.byteLength;
    }
    return buffer;
}) satisfies typeof Bun.concatArrayBuffers;

export const ArrayBufferSink = ArrayBufferSinkPolyfill satisfies typeof Bun.ArrayBufferSink;

export const pathToFileURL = pathToFileURLNode satisfies typeof Bun.pathToFileURL;
export const fileURLToPath = fileURLToPathNode satisfies typeof Bun.fileURLToPath;

export const dns = dnsPolyfill satisfies typeof Bun.dns;

export const isMainThread = workers.isMainThread satisfies typeof Bun.isMainThread;

//! It may be possible to implement plugins with Node ESM loaders, but it would take some effort and have some caveats.
//! For now, we'll simply make all calls to Bun.plugin no-op, such that manual implementation of an external ESM loader is possible,
//! but without needing to strip out all Bun.plugin calls from the source code for running on Node.
const dummyPluginBuilder: PluginBuilder = ({
    onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void {
        return; // stubbed
    },
    onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void {
        return; // stubbed
    },
    config: { plugins: [], entrypoints: [] },
}) satisfies PluginBuilder;
const bunPlugin = <T extends BunPlugin>(options: T) => options?.setup?.(dummyPluginBuilder) as ReturnType<T['setup']>;
bunPlugin.clearAll = () => void 0;
export const plugin = bunPlugin satisfies typeof Bun.plugin;
/*void plugin({
    name: 'test',
    target: 'bun',
    setup(builder) {
        if (builder.target !== 'bun') return;
        builder.onResolve({ namespace: 'sample', filter: /.+/ }, args => {
            args.importer;
            if (args.path === 'foo') return { namespace: 'redirect', path: 'bar' };
            else return;
        });
        builder.onLoad({ namespace: 'sample', filter: /.+/ }, args => {
            args.path;
            return { loader: 'object', exports: { foo: 'bar' }, contents: 'void 0;' };
        });
    }
});*/
```
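The `peek.status` polyfill above infers promise state from `util.inspect` output rather than engine internals; a quick sketch of the observable behavior, importing from the module above:

```ts
import { peek, sleep } from './modules/bun.js';

const p = sleep(50);
console.log(peek.status(p)); // 'pending'   (util.inspect(p) contains '<pending>')
await p;
console.log(peek.status(p)); // 'fulfilled' (no '<pending>' or '<rejected>' marker)
```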
packages/bun-polyfills/src/modules/bun/arraybuffersink.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
```ts
type BunArrayBufferSink = InstanceType<typeof Bun.ArrayBufferSink>;

export class ArrayBufferSink implements BunArrayBufferSink {
    #started: boolean = true;
    #closed: boolean = false;
    #offset: number = 0;
    #stream: boolean = false;
    #asUint8: boolean = false;
    #buffer: Buffer = Buffer.allocUnsafe(8192);

    get sinkId(): number { return 0; } //? undocumented, seems to always return 0

    #ASSERT_NOT_CLOSED(caller: AnyFunction): void {
        if (!this.#closed) return;
        const err = new TypeError('Expected Sink');
        Error.captureStackTrace(err, caller);
        throw err;
    }

    start({ asUint8Array = false, highWaterMark = 8192, stream = false }: Parameters<BunArrayBufferSink['start']>[0] = {}): void {
        this.#ASSERT_NOT_CLOSED(this.start);
        this.#started = true;
        this.#offset = 0;
        this.#stream = stream;
        this.#asUint8 = asUint8Array;
        if (highWaterMark !== this.#buffer.byteLength) this.#buffer = Buffer.allocUnsafe(highWaterMark);
    }

    write(data: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
        this.#ASSERT_NOT_CLOSED(this.write);
        if (typeof data === 'string') data = new TextEncoder().encode(data);
        const writedata = (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) ? new Uint8Array(data) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
        // this is very bad API design to not throw an error here, but it's what Bun does
        if (!this.#started) return writedata.byteLength;

        if (this.#offset + writedata.byteLength > this.#buffer.byteLength) {
            const newLength = Math.ceil((this.#offset + writedata.byteLength) / 1024) * 1024;
            const newBuffer = Buffer.allocUnsafe(newLength);
            newBuffer.set(this.#buffer);
            this.#buffer = newBuffer;
        }
        this.#buffer.set(writedata, this.#offset);
        this.#offset += writedata.byteLength;
        return writedata.byteLength;
    }

    flush(): number | Uint8Array | ArrayBuffer {
        this.#ASSERT_NOT_CLOSED(this.flush);
        if (!this.#stream) return 0; //! brokenly seems to always return 0 and do nothing
        const flushed = new Uint8Array(this.#offset);
        flushed.set(this.#buffer.subarray(0, this.#offset)); // faster than Buffer.copy or Uint8Array.slice
        this.#offset = 0;
        return this.#asUint8 ? flushed : flushed.buffer as ArrayBuffer;
    }

    end(): Uint8Array | ArrayBuffer {
        this.#ASSERT_NOT_CLOSED(this.end);
        const stream = this.#stream;
        this.#stream = true; // force flush() to return the data
        const buffer = this.flush() as Uint8Array | ArrayBuffer;
        this.#stream = stream;
        this.#started = false;
        return buffer;
    }

    close(): void { this.#closed = true; } //? undocumented
}
```
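A usage sketch of the sink above, following Bun's documented start/write/end flow:

```ts
import { ArrayBufferSink } from './bun/arraybuffersink.js';

const sink = new ArrayBufferSink();
sink.start({ asUint8Array: true });
sink.write('hello ');                       // strings are UTF-8 encoded before buffering
sink.write(new TextEncoder().encode('hi')); // binary chunks are appended as-is
const out = sink.end() as Uint8Array;       // Uint8Array because of asUint8Array: true
console.log(new TextDecoder().decode(out)); // 'hello hi'
```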
packages/bun-polyfills/src/modules/bun/dns.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
```ts
import dns from 'node:dns';

const dnsObj: typeof Bun.dns = {
    async lookup(hostname, options) {
        const opts = { verbatim: true, all: true } as dns.LookupOptions;
        if (options?.family) {
            if (options.family === 'IPv4') opts.family = 4;
            else if (options.family === 'IPv6') opts.family = 6;
            else if (options.family === 'any') opts.family = 0;
            else opts.family = options.family;
        }
        if (options?.flags) opts.hints = options.flags;
        //! note: opts is computed but never applied below; dns.promises.resolveAny() takes no lookup options
        const records = ((await dns.promises.resolveAny(hostname))
            .filter(r => r.type === 'A' || r.type === 'AAAA') as (dns.AnyARecord | dns.AnyAaaaRecord)[])
            .map(r => ({ address: r.address, family: r.type === 'A' ? 4 as const : 6 as const, ttl: r.ttl }));
        return records;
    },
    // This has more properties but they're not documented on bun-types yet, oh well.
};

export default dnsObj;
```
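A usage sketch of the lookup polyfill above (note it resolves A/AAAA records rather than using the system resolver):

```ts
import dns from './bun/dns.js';

const records = await dns.lookup('example.com', { family: 'IPv4' });
console.log(records); // e.g. [{ address: '93.184.215.14', family: 4, ttl: 42 }]
```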
packages/bun-polyfills/src/modules/bun/fileblob.ts (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
import fs from 'node:fs';
|
||||
import tty from 'node:tty';
|
||||
import streams from 'node:stream';
|
||||
import { ReadableStream as NodeWebReadableStream } from 'node:stream/web';
|
||||
import { FileSink } from './filesink.js';
import { SystemError } from '../../utils/errors.js';
import type { FileBlob as BunFileBlob, FileSink as BunFileSink } from 'bun';

type NodeJSStream = streams.Readable | streams.Writable;

function NodeJSReadableStreamToBlob(stream: NodeJS.ReadableStream | NodeJS.ReadWriteStream, iostream: boolean = false, type?: string): Promise<Blob> {
    if (stream.isPaused()) stream.resume();
    return new Promise((resolve, reject) => {
        const chunks: any[] = [];
        const dataHandler = (chunk: any) => { chunks.push(chunk); if (iostream) end(); };
        const end = () => {
            resolve(new Blob(chunks, type != null ? { type } : undefined));
            stream.off('data', dataHandler);
            stream.off('end', end);
            stream.pause();
        };
        stream.once('data', dataHandler).once('end', end);
        //.once('error', reject); Bun waits to error on actual operations on the stream, therefore so will we.
    });
}

export const NodeJSStreamFileBlob = class FileBlob extends Blob {
    constructor(source: NodeJSStream, slice: [number?, number?] = [undefined, undefined], type = 'application/octet-stream') {
        super(undefined, { type });
        Reflect.deleteProperty(this, 'size');
        if (source === process.stdout || source === process.stdin || source === process.stderr) {
            this.#iostream = true;
        }
        this.#readable = source instanceof streams.Readable && !(source instanceof tty.WriteStream);
        this.#source = source;
        this.#slice = slice;
        this.#size = Infinity;
    }
    readonly #iostream: boolean = false;
    readonly #readable: boolean;
    readonly #source: NodeJSStream;
    readonly #slice: [number?, number?];
    #size: number;

    slice(begin?: number, end?: number, contentType?: string): Blob;
    slice(begin?: number, contentType?: string): Blob;
    slice(contentType?: string): Blob;
    slice(beginOrType?: number | string, endOrType?: number | string, contentType: string = this.type): Blob {
        if (typeof beginOrType === 'string') return new FileBlob(this.#source, this.#slice, beginOrType);
        if (typeof endOrType === 'string') return new FileBlob(this.#source, [beginOrType, undefined], endOrType);
        return new FileBlob(this.#source, [beginOrType, endOrType], contentType);
    }

    override stream(): ReadableStream<Uint8Array> {
        // This makes no sense, but Bun does it, so we will too.
        if (!this.#readable) return new ReadableStream();
        return streams.Readable.toWeb(this.#source as streams.Readable);
    }

    #blobStackFn: AnyFunction = this.#getBlob;

    async #getBlob(): Promise<Blob> {
        if (!this.#readable) {
            const err = new SystemError(-1, 'read');
            Error.captureStackTrace(err, this.#blobStackFn);
            throw err;
        }
        const blob = (await NodeJSReadableStreamToBlob(this.#source as streams.Readable, this.#iostream)).slice(...this.#slice);
        this.#size = blob.size;
        return blob;
    }

    override async text(): Promise<string> {
        if (this.#blobStackFn !== this.json) this.#blobStackFn = this.text;
        return (await this.#getBlob()).text();
    }
    override async arrayBuffer(): Promise<ArrayBuffer> {
        this.#blobStackFn = this.arrayBuffer;
        return (await this.#getBlob()).arrayBuffer();
    }
    override async json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> {
        this.#blobStackFn = this.json;
        return JSON.parse(await this.text()) as TJSONReturnType;
    }

    override get size(): number { return this.#size; }
    override set size(_) { return; }
};

export class FileBlob extends Blob implements BunFileBlob {
    constructor(fdOrPath: number | string, opts: BlobPropertyBag = {}) {
        opts.type ??= 'application/octet-stream'; // TODO: Get MIME type from file extension
        super(undefined, opts);
        Reflect.deleteProperty(this, 'size');
        if (Reflect.get(opts, '__data')) this.#data = Reflect.get(opts, '__data') as Blob;
        const slice = Reflect.get(opts, '__slice') as [number?, number?] | undefined;
        if (slice) {
            slice[0] &&= slice[0] | 0; // int cast
            slice[1] &&= slice[1] | 0; // int cast
            this.#slice = slice;
            slice[0] ??= 0;
            if (typeof slice[1] === 'undefined') {
                if (slice[0] < 0) this.#sliceSize = -slice[0];
            }
            else if (slice[0] < 0 && slice[1] < 0) this.#sliceSize = -(slice[0] - slice[1]);
            else if (slice[0] >= 0 && slice[1] >= 0) this.#sliceSize = slice[1] - slice[0];
        }
        if (typeof fdOrPath === 'string') try {
            this.#fd = fs.openSync(fdOrPath, 'r+');
        } catch (err) {
            this.#error = err as SystemError;
        }
        else {
            this.#fd = fdOrPath;
            this.#error = Reflect.get(opts, '__error') as SystemError | undefined;
        }
        if (!this.#error) {
            const rstream = fs.createReadStream('', { fd: this.#fd, start: this.#slice[0], end: this.#slice[1] });
            this.#readable = streams.Readable.toWeb(rstream);
        }
    }
    readonly #readable?: NodeWebReadableStream;
    readonly #error?: SystemError;
    readonly #slice: [number?, number?] = [];
    readonly #sliceSize: number = 0;
    readonly #fd: number = NaN;
    #data?: Blob;

    #read() {
        if (this.#error) throw this.#error;
        const read = fs.readFileSync(this.#fd);
        this.#data = new Blob([read.subarray(...this.#slice)], { type: this.type });
    }

    //! Bun 0.2 seems to return undefined for this, this might not be accurate or it's broken on Bun's side
    get readable(): ReadableStream<any> {
        if (this.#error) throw this.#error;
        return this.#readable! as ReadableStream;
    }

    get lastModified(): number {
        if (this.#error) throw this.#error;
        return fs.fstatSync(this.#fd).mtimeMs;
    }

    async exists(): Promise<boolean> {
        return !this.#error;
    }

    writer(): BunFileSink {
        if (this.#error) throw this.#error;
        return new FileSink(this.#fd);
    }

    // TODO: what's contentType?
    override slice(begin?: number | string, end?: number | string, contentType?: string): FileBlob {
        if (typeof begin === 'string') {
            contentType = begin;
            begin = undefined;
        }
        if (typeof end === 'string') {
            contentType = end;
            end = undefined;
        }
        return new FileBlob(this.#fd, {
            __error: this.#error,
            __slice: [begin, end],
            __data: this.#data?.slice(begin, end),
        } as BlobPropertyBag);
    }
    override arrayBuffer(): Promise<ArrayBuffer> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).arrayBuffer();
    }
    override text(): Promise<string> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).text();
    }
    override json(): Promise<any>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> | Promise<any> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).json();
    }
    override stream(): NodeJS.ReadableStream;
    override stream(): ReadableStream<Uint8Array>;
    override stream(): ReadableStream<Uint8Array> | NodeJS.ReadableStream {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).stream();
    }

    override get size(): number {
        return this.#data?.size ?? (this.#sliceSize || 0);
    }
}
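
A hedged usage sketch of the two blob flavors above (the relative import path and the top-level-await ESM context are assumptions, not part of the diff):

import { FileBlob, NodeJSStreamFileBlob } from './fileblob.js';

const file = new FileBlob('./package.json'); // opens the fd eagerly; open errors are deferred until first use
if (await file.exists()) {
    const pkg = JSON.parse(await file.text()) as { name?: string };
    console.log(pkg.name, file.size, new Date(file.lastModified));
}

const stdin = new NodeJSStreamFileBlob(process.stdin);
console.log((await stdin.text()).length); // for I/O streams this resolves after the first 'data' event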

87
packages/bun-polyfills/src/modules/bun/filesink.ts
Normal file
@@ -0,0 +1,87 @@
import fs from 'node:fs';
import { SystemError } from '../../utils/errors.js';
import type { FileSink as BunFileSink } from 'bun';

export class FileSink implements BunFileSink {
    constructor(fdOrPathOrStream: number | string | NodeJS.WritableStream) {
        if (typeof fdOrPathOrStream === 'string') try {
            this.#fd = fs.openSync(fdOrPathOrStream, 'a+');
            fs.ftruncateSync(this.#fd, 0);
        } catch (err) {
            throw err as SystemError;
        }
        else if (typeof fdOrPathOrStream === 'number') {
            this.#fd = fdOrPathOrStream; // hope this fd is writable
            fs.ftruncateSync(this.#fd, 0);
        }
        else {
            this.#stream = fdOrPathOrStream;
        }
    }
    #fd: number = NaN;
    #stream: NodeJS.WritableStream | undefined;
    #closed: boolean = false;
    #writtenSinceFlush: number = 0;
    #totalWritten: number = 0;

    start(options?: { highWaterMark?: number | undefined; } | undefined): void {
        return; // TODO
    }

    ref(): void {
        return; // TODO
    }

    unref(): void {
        return; // TODO
    }

    write(chunk: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
        if (this.#closed) {
            return typeof chunk === 'string' ? chunk.length : chunk.byteLength;
        }
        if (this.#stream) {
            let data;
            if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) data = new Uint8Array(chunk);
            else if (!(chunk instanceof Uint8Array) && typeof chunk !== 'string') data = new Uint8Array(chunk.buffer);
            else data = chunk;
            this.#stream.write(data);
            const written = typeof data === 'string' ? data.length : data.byteLength;
            this.#totalWritten += written;
            return written;
        }
        if (typeof chunk === 'string') {
            fs.appendFileSync(this.#fd, chunk, 'utf8');
            this.#writtenSinceFlush += chunk.length;
            return chunk.length;
        }
        if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) fs.appendFileSync(this.#fd, new Uint8Array(chunk));
        else fs.appendFileSync(this.#fd, new Uint8Array(chunk.buffer));
        this.#writtenSinceFlush += chunk.byteLength;
        return chunk.byteLength;
    }

    //! flushing after writing to a closed FileSink segfaults in Bun but I don't see the need to implement that behavior
    flush(): number | Promise<number> {
        if (this.#closed) return 0;
        // no-op because this is a synchronous implementation
        const written = this.#writtenSinceFlush;
        this.#writtenSinceFlush = 0;
        return written;
    }

    //! not sure what to do with this error
    end(error?: Error): number | Promise<number> {
        if (this.#closed) return this.#totalWritten;
        const flushed = this.flush();
        if (this.#stream) {
            this.#stream.end();
            this.#closed = true;
            return flushed;
        }
        this.#totalWritten = fs.fstatSync(this.#fd).size;
        fs.closeSync(this.#fd);
        this.#closed = true;
        return flushed;
    }
}
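
A quick, hedged illustration of the byte bookkeeping above (the import path and output path are arbitrary):

import { FileSink } from './filesink.js';

const sink = new FileSink('/tmp/out.txt');     // opens and truncates the target file
sink.write('hello ');                          // 6: string writes count string length
sink.write(new TextEncoder().encode('world')); // 5: binary writes count byteLength
console.log(sink.flush());                     // 11: bytes written since the last flush
console.log(sink.end());                       // 0: end() re-flushes (nothing new), then closes the fd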

185
packages/bun-polyfills/src/modules/bun/hashes.ts
Normal file
@@ -0,0 +1,185 @@
import type { CryptoHashInterface, DigestEncoding, Hash } from 'bun';
import nodecrypto from 'node:crypto';
import os from 'node:os';
import md4, { Md4 } from 'js-md4';
import { wyhash, adler32, crc32, cityhash32, cityhash64, murmur32v3, murmur64v2, murmur32v2 } from '../../../lib/zighash/index.mjs';

export const bunHash = ((data, seed = 0): bigint => wyhash(data, BigInt(seed))) as typeof Bun.hash;
export const bunHashProto: Hash = {
    wyhash(data, seed = 0n) { return wyhash(data, seed); },
    adler32(data) { return adler32(data); },
    crc32(data) { return crc32(data); },
    cityHash32(data) { return cityhash32(data); },
    cityHash64(data, seed = 0n) { return cityhash64(data, seed); },
    murmur32v3(data, seed = 0) { return murmur32v3(data, seed); },
    murmur32v2(data, seed = 0) { return murmur32v2(data, seed); },
    murmur64v2(data, seed = 0n) { return murmur64v2(data, seed); },
};

type HashImpl = {
    digest(): Buffer;
    digest(encoding: nodecrypto.BinaryToTextEncoding): string;
    update(data: nodecrypto.BinaryLike): HashImpl;
    update(data: string, inputEncoding: nodecrypto.Encoding): HashImpl;
};
abstract class BaseHash<T> implements CryptoHashInterface<T> {
    readonly #hash: HashImpl | null;
    constructor(algorithm: string | HashImpl) {
        if (typeof algorithm === 'string') this.#hash = nodecrypto.createHash(algorithm);
        // If no preset algorithm is given, expect the subclass to fully implement its own.
        else this.#hash = algorithm;
    }
    update(data: StringOrBuffer) {
        if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) this.#hash!.update(new Uint8Array(data));
        else this.#hash!.update(data);
        return this as unknown as T; // is there any good way to do this without asserting?
    }
    digest(encoding: DigestEncoding): string;
    digest(hashInto?: TypedArray): TypedArray;
    digest(encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        if (typeof encodingOrHashInto === 'string') {
            const encoded = this.#hash!.digest(encodingOrHashInto);
            // you'd think node would throw an error if the encoding is invalid, but nope!
            // instead it silently returns as if you passed no encoding and gives a Buffer...
            if (Buffer.isBuffer(encoded)) throw new TypeError(`Unknown encoding: "${encodingOrHashInto}"`);
            else return encoded;
        }
        const digested = this.#hash!.digest();
        if (encodingOrHashInto === undefined) return new Uint8Array(digested.buffer, digested.byteOffset, digested.byteLength);
        if (encodingOrHashInto.byteLength < this.byteLength) throw new TypeError(`TypedArray must be at least ${this.byteLength} bytes`);
        if (encodingOrHashInto instanceof BigInt64Array || encodingOrHashInto instanceof BigUint64Array) {
            // avoid checking endianness for every loop iteration
            const endianAwareInsert = os.endianness() === 'LE'
                ? (arr: string[], j: number, num: string) => arr[7 - j] = num
                : (arr: string[], j: number, num: string) => arr[j] = num;

            for (let i = 0; i < digested.byteLength; i += 8) {
                const bigintStrArr = ['', '', '', '', '', '', '', ''];
                for (let j = 0; j < 8; j++) {
                    const byte = digested[i + j];
                    if (byte === undefined) break;
                    endianAwareInsert(bigintStrArr, j, byte.toString(16).padStart(2, '0'));
                }
                encodingOrHashInto[i / 8] = BigInt(`0x${bigintStrArr.join('')}`);
            }
        } else {
            const HashIntoTypedArray = encodingOrHashInto.constructor as TypedArrayConstructor;
            // this will work as long as all hash classes have a byteLength that is a multiple of 4 bytes
            encodingOrHashInto.set(new HashIntoTypedArray(digested.buffer, digested.byteOffset, digested.byteLength / HashIntoTypedArray.BYTES_PER_ELEMENT));
        }
        return encodingOrHashInto;
    }
    static hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { return ''; }
    static readonly byteLength: number;
    abstract readonly byteLength: number;
}

export class SHA1 extends BaseHash<SHA1> {
    constructor() { super('sha1'); }
    static override readonly byteLength = 20;
    override readonly byteLength = 20;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class MD4 extends BaseHash<MD4> {
    constructor() { //! Not supported by nodecrypto
        const hash = md4.create() as unknown as Omit<Md4, 'toString'> & { _update: Md4['update'] };
        function digest(): Buffer;
        function digest(encoding: nodecrypto.BinaryToTextEncoding): string;
        function digest(encoding?: nodecrypto.BinaryToTextEncoding) {
            const buf = Buffer.from(hash.arrayBuffer());
            if (encoding) return buf.toString(encoding);
            else return buf;
        }
        function update(data: nodecrypto.BinaryLike) {
            if (typeof data === 'string') hash._update(data);
            else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) hash._update(new Uint8Array(data));
            else hash._update(new Uint8Array(data.buffer));
            return hash as unknown as MD4HashImpl;
        }
        type MD4HashImpl = Omit<Md4, 'toString'> & { digest: typeof digest, update: typeof update };
        // @ts-expect-error patches to reuse the BaseHash methods
        hash.digest = digest; hash._update = hash.update; hash.update = update;
        super(hash as unknown as MD4HashImpl);
    }
    static override readonly byteLength = 16;
    override readonly byteLength = 16;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class MD5 extends BaseHash<MD5> {
    constructor() { super('md5'); }
    static override readonly byteLength = 16;
    override readonly byteLength = 16;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA224 extends BaseHash<SHA224> {
    constructor() { super('sha224'); }
    static override readonly byteLength = 28;
    override readonly byteLength = 28;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA512 extends BaseHash<SHA512> {
    constructor() { super('sha512'); }
    static override readonly byteLength = 64;
    override readonly byteLength = 64;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA384 extends BaseHash<SHA384> {
    constructor() { super('sha384'); }
    static override readonly byteLength = 48;
    override readonly byteLength = 48;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA256 extends BaseHash<SHA256> {
    constructor() { super('sha256'); }
    static override readonly byteLength = 32;
    override readonly byteLength = 32;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA512_256 extends BaseHash<SHA512_256> {
    constructor() { super('sha512-256'); }
    static override readonly byteLength = 32;
    override readonly byteLength = 32;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
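
A hedged sketch of both hashing surfaces above (the relative import path is assumed):

import { bunHash, bunHashProto, SHA256 } from './hashes.js';

console.log(bunHash('hello world'));            // bigint: wyhash with seed 0
console.log(bunHashProto.crc32('hello world')); // number, per the narrowed bun-types further below
console.log(SHA256.hash('bun', 'hex'));         // one-shot hex digest
const hasher = new SHA256();
hasher.update('b').update('un');
console.log(hasher.digest(new Uint8Array(32))); // digest into a caller-provided TypedArray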

103
packages/bun-polyfills/src/modules/bun/transpiler.ts
Normal file
@@ -0,0 +1,103 @@
import type { JavaScriptLoader, TranspilerOptions, Transpiler as BunTranspiler, Import } from 'bun';
import { NotImplementedError } from '../../utils/errors.js';

// TODO: Possible implementation with WASM builds of bun with just the transpiler?
// NOTE: This could be implemented with something like SWC (and previously was), but that approach
// has lots of quirks due to the differences between SWC and Bun, so the plan is not to revisit it
// unless there is actual demand for using Bun.Transpiler in Node.js before the WASM build is worked on.
// The signatures are here for now as a placeholder.

export default class Transpiler implements BunTranspiler {
    constructor(options?: TranspilerOptions) {
        this.#options = options ?? {};
    }

    async transform(code: StringOrBuffer, loader: JavaScriptLoader): Promise<string> {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        throw new NotImplementedError('Bun.Transpiler', this.transform);
    }

    transformSync(code: StringOrBuffer, ctx: object): string;
    transformSync(code: StringOrBuffer, loader: JavaScriptLoader, ctx: object): string;
    transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | undefined): string;
    transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | object, ctx: object = {}): string {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        if (typeof loader !== 'string') loader = 'js';
        throw new NotImplementedError('Bun.Transpiler', this.transformSync);
    }

    scan(code: StringOrBuffer): { exports: string[]; imports: Import[]; } {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        throw new NotImplementedError('Bun.Transpiler', this.scan);
        //return {
        //    imports: this.scanImports(code),
        //    exports: this.#scanExports(code)
        //};
    }

    scanImports(code: StringOrBuffer): {
        kind: 'import-statement' | 'dynamic-import';
        path: string;
    }[] {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        throw new NotImplementedError('Bun.Transpiler', this.scanImports);
        //const imports: { kind: 'import-statement' | 'dynamic-import', path: string }[] = [];
        //this.#scanTopLevelImports(code).forEach(x => imports.push({ kind: 'import-statement', path: x }));
        //this.#scanDynamicImports(code).forEach(x => imports.push({ kind: 'dynamic-import', path: x }));
        //return imports;
    }

    /*#scanDynamicImports(code: string): string[] {
        return this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body.filter(x => x.type === 'ExpressionStatement' && x.expression.type === 'CallExpression' && x.expression.callee.type === 'Import')
            .map(i => (((i as swc.ExpressionStatement).expression as swc.CallExpression).arguments[0].expression as swc.StringLiteral).value);
    }*/

    /*#scanTopLevelImports(code: string): string[] {
        return this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body.filter(x => x.type === 'ImportDeclaration' || x.type === 'ExportAllDeclaration' || x.type === 'ExportNamedDeclaration')
            .filter(i => !(i as swc.ImportDeclaration).typeOnly)
            .map(i => (i as swc.ImportDeclaration).source.value);
    }*/

    /*#scanExports(code: string, includeDefault: boolean = false): string[] {
        const parsed = this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body;
        const exports = [];
        exports.push(parsed.filter(x => x.type === 'ExportDeclaration' && !x.declaration.declare)
            .flatMap(i => ((i as swc.ExportDeclaration).declaration as swc.ClassDeclaration).identifier?.value ??
                ((i as swc.ExportDeclaration).declaration as swc.VariableDeclaration).declarations.map(d => (d.id as swc.Identifier).value)
            )
        );
        exports.push(parsed.filter(x => x.type === 'ExportNamedDeclaration')
            .flatMap(i => (i as swc.ExportNamedDeclaration).specifiers
                .filter(s => s.type === 'ExportSpecifier' && !s.isTypeOnly)
                .map(s => (s as swc.NamedExportSpecifier).exported?.value ?? (s as swc.NamedExportSpecifier).orig.value)
            )
        );
        if (includeDefault) exports.push(this.#scanDefaultExport(code) ?? []);
        return exports.flat();
    }*/

    /*#scanDefaultExport(code: string): 'default' | undefined {
        const parsed = this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body;

        const defaultExportDecl = parsed.find(x => x.type === 'ExportDefaultDeclaration') as swc.ExportDefaultDeclaration | undefined;
        if (!defaultExportDecl) {
            const defaultExportExpr = parsed.find(x => x.type === 'ExportDefaultExpression') as swc.ExportDefaultExpression | undefined;
            if (!defaultExportExpr) return undefined;
            if (!defaultExportExpr.expression.type.startsWith('Ts')) return 'default';
            else return undefined;
        }

        if (!defaultExportDecl.decl.type.startsWith('Ts') && !Reflect.get(defaultExportDecl.decl, 'declare')) return 'default';
        else return undefined;
    }*/

    #options: TranspilerOptions;
}
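
Since every method currently throws, callers can feature-detect the placeholder; a hedged sketch (import path assumed):

import Transpiler from './transpiler.js';

const transpiler = new Transpiler({ loader: 'ts' });
try {
    transpiler.transformSync('const x: number = 1;', 'ts');
} catch (err) {
    console.log((err as Error).name); // "NotImplementedError"
}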

29
packages/bun-polyfills/src/repl.ts
Normal file
@@ -0,0 +1,29 @@
import bun from './index.js';

// This file serves two purposes:
// 1. It is the entry point for using the Bun global in the REPL. (--import this file)
// 2. It makes TypeScript check the full structural compatibility of the Bun global vs the polyfills object,
//    which allows for the type assertion below to be used as a TODO list index.

globalThis.Bun = bun as typeof bun & {
    // TODO: Missing polyfills
    readableStreamToFormData: typeof import('bun').readableStreamToFormData;
    deepEquals: typeof import('bun').deepEquals;
    deepMatch: typeof import('bun').deepMatch;
    build: typeof import('bun').build;
    mmap: typeof import('bun').mmap;
    gc: typeof import('bun').gc;
    connect: typeof import('bun').connect;
    listen: typeof import('bun').listen;
    Transpiler: typeof import('bun').Transpiler;
    password: typeof import('bun').password;
    CryptoHashInterface: typeof import('bun').CryptoHashInterface;
    CryptoHasher: typeof import('bun').CryptoHasher;
    FileSystemRouter: typeof import('bun').FileSystemRouter;

    //? Polyfilled but with broken types (See each one in ./src/modules/bun.ts for details)
    generateHeapSnapshot: typeof import('bun').generateHeapSnapshot;
    stdout: typeof import('bun').stdout;
    stderr: typeof import('bun').stderr;
    stdin: typeof import('bun').stdin;
};
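
A hedged illustration of purpose (1): any Node entry point that preloads this module gets the polyfilled global. The compiled path below is hypothetical:

// e.g. node --import ./dist/repl.js  (Node 20.6+; adjust the path to the actual build output)
import './repl.js';

console.log(typeof Bun.hash('abc')); // "bigint" via the wyhash polyfill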

13
packages/bun-polyfills/src/types/helpers.d.ts
vendored
Normal file
@@ -0,0 +1,13 @@
type AnyFunction = (...args: any[]) => any;
type AnyClass = new (...args: any[]) => any;
type AnyCallable = AnyFunction | AnyClass;

type MapKeysType<T extends Map<unknown, unknown>> = T extends Map<infer K, infer V> ? K : never;
type MapValuesType<T extends Map<unknown, unknown>> = T extends Map<infer K, infer V> ? V : never;

type Mutable<T> = { -readonly [K in keyof T]: T[K] };

/** Excluding the BigInt typed arrays */
type TypedArrayConstructor =
    | typeof Uint8Array | typeof Uint16Array | typeof Uint32Array | typeof Uint8ClampedArray
    | typeof Int8Array | typeof Int16Array | typeof Int32Array | typeof Float32Array | typeof Float64Array;

72
packages/bun-polyfills/src/types/md4.d.ts
vendored
Normal file
@@ -0,0 +1,72 @@
declare module 'js-md4' {
    export type MD4Input = string | ArrayBuffer | Uint8Array | number[];

    interface md4 {
        /**
         * # Broken, will throw an error.
         * @deprecated Use {@link md4.hex} instead.
         */
        (input: MD4Input): never;
        /** Creates an `Md4` hasher instance. */
        create(): Md4;
        /** Shortcut for `md4.create().update(...)` */
        update(message: MD4Input): Md4;
        /** Hash `message` into a hex string. */
        hex(message: MD4Input): string;
        /** Hash `message` into an Array. */
        array(message: MD4Input): number[];
        /** Identical to {@link md4.array}. */
        digest(message: MD4Input): number[];
        /**
         * Identical to {@link md4.arrayBuffer}.
         * @deprecated Use {@link md4.arrayBuffer} instead.
         */
        buffer(message: MD4Input): ArrayBuffer;
        /** Hash `message` into an ArrayBuffer. */
        arrayBuffer(message: MD4Input): ArrayBuffer;
    }

    export class Md4 {
        private constructor();

        private toString(): string;
        private finalize(): void;
        private hash(): void;
        /**
         * Append `message` to the internal hash source data.
         * @returns A reference to `this` for chaining, or nothing if the instance has been finalized.
         */
        update(message: MD4Input): this | void;
        /** Hash into a hex string. Finalizes the hash. */
        hex(): string;
        /** Hash into an Array. Finalizes the hash. */
        array(): number[];
        /** Identical to {@link Md4.array}. */
        digest(): number[];
        /**
         * Identical to {@link Md4.arrayBuffer}.
         * @deprecated Use {@link Md4.arrayBuffer} instead.
         */
        buffer(): ArrayBuffer;
        /** Hash into an ArrayBuffer. Finalizes the hash. */
        arrayBuffer(): ArrayBuffer;

        private buffer8: Uint8Array;
        private blocks: Uint32Array;
        private bytes: number;
        private start: number;
        private h3: number;
        private h2: number;
        private h1: number;
        private h0: number;
        readonly hashed: boolean;
        /** If true, `update()` operations will silently fail. */
        readonly finalized: boolean;
        readonly first: boolean;
        private lastByteIndex?: number;
    }

    const md4: md4;
    export default md4;
}
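
A small usage sketch of the js-md4 surface typed above:

import md4 from 'js-md4';

console.log(md4.hex('hello')); // one-shot hex digest
const hasher = md4.create();
hasher.update('hel');
hasher.update('lo');
console.log(hasher.hex());     // same digest, built incrementally; finalizes the hasher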

15
packages/bun-polyfills/src/types/sync.d.ts
vendored
Normal file
@@ -0,0 +1,15 @@
// This file explicitly redefines some global types in order to enforce the correct ones,
// regardless of the arbitrary order in which TSC/TSServer decide to load the type libraries.
// Annoyingly, even this file can sometimes break, so if your types are inverted, try restarting TSServer.

import '@types/node';

declare module 'stream/web' {
    interface ReadableStreamDefaultReader {
        readMany(): Promise<ReadableStreamDefaultReadManyResult<any>>;
    }
}

declare global {
    var performance: typeof import('perf_hooks').performance;
}

24
packages/bun-polyfills/src/types/v8heapsnapshot.d.ts
vendored
Normal file
@@ -0,0 +1,24 @@
interface V8HeapSnapshot {
    snapshot: {
        meta: {
            node_fields: string[],
            node_types: [string[], ...string[]],
            edge_fields: string[],
            edge_types: [string[], ...string[]],
            trace_function_info_fields: string[],
            trace_node_fields: string[],
            sample_fields: string[],
            location_fields: string[]
        },
        node_count: number,
        edge_count: number,
        trace_function_count: number
    },
    nodes: number[],
    edges: number[],
    trace_function_infos: unknown[],
    trace_tree: unknown[],
    samples: unknown[],
    locations: number[],
    strings: string[]
}

221
packages/bun-polyfills/src/utils/errors.ts
Normal file
@@ -0,0 +1,221 @@
type PosixErrNo = MapKeysType<ReturnType<typeof getPosixSystemErrorMap>>;
type Win32ErrNo = MapKeysType<ReturnType<typeof getWin32SystemErrorMap>>;

export function getPosixSystemErrorMap() {
    return new Map([
        [ -7, [ 'E2BIG', 'argument list too long' ] ],
        [ -13, [ 'EACCES', 'permission denied' ] ],
        [ -98, [ 'EADDRINUSE', 'address already in use' ] ],
        [ -99, [ 'EADDRNOTAVAIL', 'address not available' ] ],
        [ -97, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
        [ -11, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
        [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
        [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
        [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
        [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
        [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
        [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
        [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
        [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
        [ -3007, [ 'EAI_NODATA', 'no address' ] ],
        [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
        [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
        [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
        [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
        [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
        [ -114, [ 'EALREADY', 'connection already in progress' ] ],
        [ -9, [ 'EBADF', 'bad file descriptor' ] ],
        [ -16, [ 'EBUSY', 'resource busy or locked' ] ],
        [ -125, [ 'ECANCELED', 'operation canceled' ] ],
        [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
        [ -103, [ 'ECONNABORTED', 'software caused connection abort' ] ],
        [ -111, [ 'ECONNREFUSED', 'connection refused' ] ],
        [ -104, [ 'ECONNRESET', 'connection reset by peer' ] ],
        [ -89, [ 'EDESTADDRREQ', 'destination address required' ] ],
        [ -17, [ 'EEXIST', 'file already exists' ] ],
        [ -14, [ 'EFAULT', 'bad address in system call argument' ] ],
        [ -27, [ 'EFBIG', 'file too large' ] ],
        [ -113, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
        [ -4, [ 'EINTR', 'interrupted system call' ] ],
        [ -22, [ 'EINVAL', 'invalid argument' ] ],
        [ -5, [ 'EIO', 'i/o error' ] ],
        [ -106, [ 'EISCONN', 'socket is already connected' ] ],
        [ -21, [ 'EISDIR', 'illegal operation on a directory' ] ],
        [ -40, [ 'ELOOP', 'too many symbolic links encountered' ] ],
        [ -24, [ 'EMFILE', 'too many open files' ] ],
        [ -90, [ 'EMSGSIZE', 'message too long' ] ],
        [ -36, [ 'ENAMETOOLONG', 'name too long' ] ],
        [ -100, [ 'ENETDOWN', 'network is down' ] ],
        [ -101, [ 'ENETUNREACH', 'network is unreachable' ] ],
        [ -23, [ 'ENFILE', 'file table overflow' ] ],
        [ -105, [ 'ENOBUFS', 'no buffer space available' ] ],
        [ -19, [ 'ENODEV', 'no such device' ] ],
        [ -2, [ 'ENOENT', 'no such file or directory' ] ],
        [ -12, [ 'ENOMEM', 'not enough memory' ] ],
        [ -64, [ 'ENONET', 'machine is not on the network' ] ],
        [ -92, [ 'ENOPROTOOPT', 'protocol not available' ] ],
        [ -28, [ 'ENOSPC', 'no space left on device' ] ],
        [ -38, [ 'ENOSYS', 'function not implemented' ] ],
        [ -107, [ 'ENOTCONN', 'socket is not connected' ] ],
        [ -20, [ 'ENOTDIR', 'not a directory' ] ],
        [ -39, [ 'ENOTEMPTY', 'directory not empty' ] ],
        [ -88, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
        [ -95, [ 'ENOTSUP', 'operation not supported on socket' ] ],
        [ -75, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
        [ -1, [ 'EPERM', 'operation not permitted' ] ],
        [ -32, [ 'EPIPE', 'broken pipe' ] ],
        [ -71, [ 'EPROTO', 'protocol error' ] ],
        [ -93, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
        [ -91, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
        [ -34, [ 'ERANGE', 'result too large' ] ],
        [ -30, [ 'EROFS', 'read-only file system' ] ],
        [ -108, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
        [ -29, [ 'ESPIPE', 'invalid seek' ] ],
        [ -3, [ 'ESRCH', 'no such process' ] ],
        [ -110, [ 'ETIMEDOUT', 'connection timed out' ] ],
        [ -26, [ 'ETXTBSY', 'text file is busy' ] ],
        [ -18, [ 'EXDEV', 'cross-device link not permitted' ] ],
        [ -4094, [ 'UNKNOWN', 'unknown error' ] ],
        [ -4095, [ 'EOF', 'end of file' ] ],
        [ -6, [ 'ENXIO', 'no such device or address' ] ],
        [ -31, [ 'EMLINK', 'too many links' ] ],
        [ -112, [ 'EHOSTDOWN', 'host is down' ] ],
        [ -121, [ 'EREMOTEIO', 'remote I/O error' ] ],
        [ -25, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
        [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
        [ -84, [ 'EILSEQ', 'illegal byte sequence' ] ],
        [ -94, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
    ] as const);
}

export function getWin32SystemErrorMap() {
    return new Map([
        [ -4093, [ 'E2BIG', 'argument list too long' ] ],
        [ -4092, [ 'EACCES', 'permission denied' ] ],
        [ -4091, [ 'EADDRINUSE', 'address already in use' ] ],
        [ -4090, [ 'EADDRNOTAVAIL', 'address not available' ] ],
        [ -4089, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
        [ -4088, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
        [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
        [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
        [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
        [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
        [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
        [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
        [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
        [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
        [ -3007, [ 'EAI_NODATA', 'no address' ] ],
        [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
        [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
        [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
        [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
        [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
        [ -4084, [ 'EALREADY', 'connection already in progress' ] ],
        [ -4083, [ 'EBADF', 'bad file descriptor' ] ],
        [ -4082, [ 'EBUSY', 'resource busy or locked' ] ],
        [ -4081, [ 'ECANCELED', 'operation canceled' ] ],
        [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
        [ -4079, [ 'ECONNABORTED', 'software caused connection abort' ] ],
        [ -4078, [ 'ECONNREFUSED', 'connection refused' ] ],
        [ -4077, [ 'ECONNRESET', 'connection reset by peer' ] ],
        [ -4076, [ 'EDESTADDRREQ', 'destination address required' ] ],
        [ -4075, [ 'EEXIST', 'file already exists' ] ],
        [ -4074, [ 'EFAULT', 'bad address in system call argument' ] ],
        [ -4036, [ 'EFBIG', 'file too large' ] ],
        [ -4073, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
        [ -4072, [ 'EINTR', 'interrupted system call' ] ],
        [ -4071, [ 'EINVAL', 'invalid argument' ] ],
        [ -4070, [ 'EIO', 'i/o error' ] ],
        [ -4069, [ 'EISCONN', 'socket is already connected' ] ],
        [ -4068, [ 'EISDIR', 'illegal operation on a directory' ] ],
        [ -4067, [ 'ELOOP', 'too many symbolic links encountered' ] ],
        [ -4066, [ 'EMFILE', 'too many open files' ] ],
        [ -4065, [ 'EMSGSIZE', 'message too long' ] ],
        [ -4064, [ 'ENAMETOOLONG', 'name too long' ] ],
        [ -4063, [ 'ENETDOWN', 'network is down' ] ],
        [ -4062, [ 'ENETUNREACH', 'network is unreachable' ] ],
        [ -4061, [ 'ENFILE', 'file table overflow' ] ],
        [ -4060, [ 'ENOBUFS', 'no buffer space available' ] ],
        [ -4059, [ 'ENODEV', 'no such device' ] ],
        [ -4058, [ 'ENOENT', 'no such file or directory' ] ],
        [ -4057, [ 'ENOMEM', 'not enough memory' ] ],
        [ -4056, [ 'ENONET', 'machine is not on the network' ] ],
        [ -4035, [ 'ENOPROTOOPT', 'protocol not available' ] ],
        [ -4055, [ 'ENOSPC', 'no space left on device' ] ],
        [ -4054, [ 'ENOSYS', 'function not implemented' ] ],
        [ -4053, [ 'ENOTCONN', 'socket is not connected' ] ],
        [ -4052, [ 'ENOTDIR', 'not a directory' ] ],
        [ -4051, [ 'ENOTEMPTY', 'directory not empty' ] ],
        [ -4050, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
        [ -4049, [ 'ENOTSUP', 'operation not supported on socket' ] ],
        [ -4026, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
        [ -4048, [ 'EPERM', 'operation not permitted' ] ],
        [ -4047, [ 'EPIPE', 'broken pipe' ] ],
        [ -4046, [ 'EPROTO', 'protocol error' ] ],
        [ -4045, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
        [ -4044, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
        [ -4034, [ 'ERANGE', 'result too large' ] ],
        [ -4043, [ 'EROFS', 'read-only file system' ] ],
        [ -4042, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
        [ -4041, [ 'ESPIPE', 'invalid seek' ] ],
        [ -4040, [ 'ESRCH', 'no such process' ] ],
        [ -4039, [ 'ETIMEDOUT', 'connection timed out' ] ],
        [ -4038, [ 'ETXTBSY', 'text file is busy' ] ],
        [ -4037, [ 'EXDEV', 'cross-device link not permitted' ] ],
        [ -4094, [ 'UNKNOWN', 'unknown error' ] ],
        [ -4095, [ 'EOF', 'end of file' ] ],
        [ -4033, [ 'ENXIO', 'no such device or address' ] ],
        [ -4032, [ 'EMLINK', 'too many links' ] ],
        [ -4031, [ 'EHOSTDOWN', 'host is down' ] ],
        [ -4030, [ 'EREMOTEIO', 'remote I/O error' ] ],
        [ -4029, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
        [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
        [ -4027, [ 'EILSEQ', 'illegal byte sequence' ] ],
        [ -4025, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
    ] as const);
}

export function getPosixToWin32SystemErrorMap() {
    const posixEntries = [...getPosixSystemErrorMap().entries()];
    const win32Entries = [...getWin32SystemErrorMap().entries()];
    const map: Map<PosixErrNo, Win32ErrNo> = new Map();
    posixEntries.forEach(([code, val]) => {
        const found = win32Entries.find(([_, v]) => v[0] === val[0]);
        if (!found) console.error(val[0]);
        else map.set(code, found[0]);
    });
    return map;
}

export function getPlatformSystemErrorFromPosix(posixErrNo: PosixErrNo) {
    if (process.platform === 'win32') {
        const win32errno = getPosixToWin32SystemErrorMap().get(posixErrNo)!;
        return getWin32SystemErrorMap().get(win32errno);
    } else {
        return getPosixSystemErrorMap().get(posixErrNo);
    }
}

export class SystemError extends Error {
    constructor(errno: PosixErrNo, syscall?: string, errpath?: string) {
        const [errname, errmsg] = getPlatformSystemErrorFromPosix(errno) ?? ['SystemError', 'Unknown system error'];
        super(errmsg);
        this.name = errname;
        this.code = errname;
        this.errno = errno;
        if (syscall) this.syscall = syscall;
        if (errpath) this.path = errpath;
    }
    errno?: number | undefined;
    code?: string | undefined;
    path?: string | undefined;
    syscall?: string | undefined;
}

export class NotImplementedError extends Error {
    constructor(thing: string, func: AnyCallable = NotImplementedError, overrideMsg: boolean = false) {
        super(overrideMsg ? thing : `A polyfill for ${thing} is not yet implemented by bun-polyfills.`);
        this.name = 'NotImplementedError';
        Error.captureStackTrace(this, func);
    }
}
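
A brief, hedged example of the error surface above (relative import path assumed):

import { SystemError, NotImplementedError } from './errors.js';

const err = new SystemError(-2, 'open', '/no/such/file');
console.log(err.name, err.errno, err.syscall); // "ENOENT" -2 "open" (the name is remapped via the win32 table on Windows)
const todo = new NotImplementedError('Bun.mmap');
console.log(todo.message); // "A polyfill for Bun.mmap is not yet implemented by bun-polyfills."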

36
packages/bun-polyfills/src/utils/misc.ts
Normal file
@@ -0,0 +1,36 @@
import streams from 'node:stream';
import type { SpawnOptions, FileBlob } from 'bun';

export const getter = <T>(obj: T, key: string | symbol, get: () => any, enumerable = false, configurable = true): void => {
    Object.defineProperty(obj, key, { get, configurable, enumerable });
};

export const setter = <T>(obj: T, key: string | symbol, set: (value: any) => void, enumerable = false, configurable = true): void => {
    Object.defineProperty(obj, key, { set, configurable, enumerable });
};

export const readonly = <T>(obj: T, key: string | symbol, value: unknown, enumerable = false, configurable = true): void => {
    Object.defineProperty(obj, key, { value, configurable, enumerable });
};

export function streamToBuffer(stream: streams.Readable | streams.Duplex): Promise<Buffer> {
    return new Promise((resolve, reject) => {
        const buffers: Uint8Array[] = [];
        stream.on('data', (chunk: Uint8Array) => buffers.push(chunk));
        stream.on('end', () => resolve(Buffer.concat(buffers)));
        stream.on('error', (err: Error) => reject(err));
    });
}

export function isArrayBufferView(value: any): value is ArrayBufferView {
    return value !== null && typeof value === 'object' &&
        value.buffer instanceof ArrayBuffer && typeof value.byteLength === 'number' && typeof value.byteOffset === 'number';
}

export function isOptions(options: any): options is SpawnOptions.OptionsObject {
    return options !== null && typeof options === 'object';
}

export function isFileBlob(blob: any): blob is FileBlob {
    return blob instanceof Blob && Reflect.get(blob, 'readable') instanceof ReadableStream && typeof Reflect.get(blob, 'writer') === 'function';
}
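
A hedged sketch of the helpers above in use (assumes an ESM context with top-level await):

import fs from 'node:fs';
import { streamToBuffer, getter } from './misc.js';

const buf = await streamToBuffer(fs.createReadStream('./package.json'));
console.log(buf.byteLength);

const obj = {} as { now?: number };
getter(obj, 'now', () => Date.now()); // defines a lazy, non-enumerable property
console.log(obj.now);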

19
packages/bun-polyfills/tsconfig.json
Normal file
@@ -0,0 +1,19 @@
{
    "compilerOptions": {
        "lib": ["ESNext"],
        "module": "esnext",
        "target": "esnext",
        "moduleResolution": "nodenext",
        "moduleDetection": "force",
        "strict": true,
        "downlevelIteration": true,
        "skipLibCheck": true,
        "allowSyntheticDefaultImports": true,
        "forceConsistentCasingInFileNames": true,
        "inlineSourceMap": true,
        "allowJs": true,
        "outDir": "dist",
        "types": ["node"]
    },
    "include": [".", "../bun-types/index.d.ts"],
}

43
packages/bun-types/bun.d.ts
vendored
@@ -24,7 +24,7 @@ declare module "bun" {
   import { Encoding as CryptoEncoding } from "crypto";

   export interface Env extends Dict<string>, NodeJS.ProcessEnv {
-    NODE_ENV: string;
+    NODE_ENV?: string;

     /**
      * The timezone used by Intl, Date, etc.
@@ -68,7 +68,7 @@ declare module "bun" {
   export function which(
     command: string,
     options?: { PATH?: string; cwd?: string },
-  ): string;
+  ): string | null;

   export type Serve<WebSocketDataType = undefined> =
     | ServeOptions
@@ -752,39 +752,40 @@ declare module "bun" {
    */
   export const hash: ((
     data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
-    seed?: number,
+    seed?: number | bigint,
   ) => number | bigint) &
     Hash;

   interface Hash {
     wyhash: (
       data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
-      seed?: number,
-    ) => number | bigint;
-    crc32: (
-      data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
-      seed?: number,
-    ) => number | bigint;
+      seed?: bigint,
+    ) => bigint;
     adler32: (
       data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
-      seed?: number,
-    ) => number | bigint;
+    ) => number;
+    crc32: (
+      data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
+    ) => number;
     cityHash32: (
       data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
-      seed?: number,
-    ) => number | bigint;
+    ) => number;
     cityHash64: (
       data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
-      seed?: number,
-    ) => number | bigint;
+      seed?: bigint,
+    ) => bigint;
     murmur32v3: (
       data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
       seed?: number,
-    ) => number | bigint;
-    murmur64v2: (
+    ) => number;
+    murmur32v2: (
       data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
       seed?: number,
-    ) => number | bigint;
+    ) => number;
+    murmur64v2: (
+      data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
+      seed?: bigint,
+    ) => bigint;
   }

   export type JavaScriptLoader = "jsx" | "js" | "ts" | "tsx";
@@ -2428,8 +2429,8 @@ declare module "bun" {
    * If you have any ideas, please file an issue https://github.com/oven-sh/bun
    */
   interface HeapSnapshot {
-    /** "2" */
-    version: string;
+    /** 2 */
+    version: number;

     /** "Inspector" */
     type: string;
@@ -2675,7 +2676,7 @@ declare module "bun" {
    * openssl sha512-256 /path/to/file
    *```
    */
-  export function sha(input: StringOrBuffer, hashInto?: Uint8Array): Uint8Array;
+  export function sha(input: StringOrBuffer, hashInto?: TypedArray): TypedArray;

   /**
    *

4
packages/bun-types/globals.d.ts
vendored
@@ -2648,10 +2648,6 @@ interface ReadableStream<R = any> {
     options?: StreamPipeOptions,
   ): Promise<void>;
   tee(): [ReadableStream<R>, ReadableStream<R>];
-  forEach(
-    callbackfn: (value: any, key: number, parent: ReadableStream<R>) => void,
-    thisArg?: any,
-  ): void;
   [Symbol.asyncIterator](): AsyncIterableIterator<R>;
   values(options?: { preventCancel: boolean }): AsyncIterableIterator<R>;
 }

@@ -1 +1 @@
-Bun.hash.wyhash("asdf", 1234);
+const hash: bigint = Bun.hash.wyhash("asdf", 1234n);

BIN
test/bun.lockb
Binary file not shown.