mirror of https://github.com/oven-sh/bun
synced 2026-02-05 08:28:55 +00:00

Compare commits: fix-node-h… → pfg/blocki… (316 commits, 65c3d963a7 … f36d480919)
@@ -436,7 +436,10 @@ function getBuildCppStep(platform, options) {
       BUN_CPP_ONLY: "ON",
       ...getBuildEnv(platform, options),
     },
-    command: "bun run build:ci --target bun",
+    // We used to build the C++ dependencies and bun in seperate steps.
+    // However, as long as the zig build takes longer than both sequentially,
+    // it's cheaper to run them in the same step. Can be revisited in the future.
+    command: ["bun run build:ci --target bun", "bun run build:ci --target dependencies"],
   };
 }

@@ -484,11 +487,7 @@ function getLinkBunStep(platform, options) {
   return {
     key: `${getTargetKey(platform)}-build-bun`,
     label: `${getTargetLabel(platform)} - build-bun`,
-    depends_on: [
-      `${getTargetKey(platform)}-build-vendor`,
-      `${getTargetKey(platform)}-build-cpp`,
-      `${getTargetKey(platform)}-build-zig`,
-    ],
+    depends_on: [`${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`],
     agents: getCppAgent(platform, options),
     retry: getRetry(),
     cancel_on_build_failing: isMergeQueue(),

@@ -1089,12 +1088,7 @@ async function getPipeline(options = {}) {
       group: getTargetLabel(target),
       steps: unifiedBuilds
         ? [getBuildBunStep(target, options)]
-        : [
-            getBuildVendorStep(target, options),
-            getBuildCppStep(target, options),
-            getBuildZigStep(target, options),
-            getLinkBunStep(target, options),
-          ],
+        : [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
     },
     imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
   );
@@ -1,13 +1,14 @@
 ---
 description: JavaScript class implemented in C++
 globs: *.cpp
 alwaysApply: false
 ---

 # Implementing JavaScript classes in C++

 If there is a publicly accessible Constructor and Prototype, then there are 3 classes:

-- IF there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`, if no C++ class fields (only JS properties) then we don't need a class at all usually. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSBinding.cpp](mdc:src/bun.js/bindings/NodeFSBinding.cpp).
+- IF there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`, if no C++ class fields (only JS properties) then we don't need a class at all usually. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
 - class FooPrototype : public JSC::JSNonFinalObject
 - class FooConstructor : public JSC::InternalFunction

@@ -18,6 +19,7 @@ If there are C++ fields on the Foo class, the Foo class will need an iso subspace
 Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.

 Generally, defining the subspace looks like this:

 ```c++
 class Foo : public JSC::DestructibleObject {

@@ -45,6 +47,7 @@ It's better to put it in the .cpp file instead of the .h file, when possible.
 ## Defining properties

 Define properties on the prototype. Use a const HashTableValues like this:

 ```C++
 static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
 static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);

@@ -158,6 +161,7 @@ void JSX509CertificatePrototype::finishCreation(VM& vm)
 ```

 ### Getter definition:

 ```C++
 JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))

@@ -212,7 +216,6 @@ JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * glo
 }
 ```

 ### Constructor definition

 ```C++

@@ -259,7 +262,6 @@ private:
 };
 ```

 ### Structure caching

 If there's a class, prototype, and constructor:

@@ -279,6 +281,7 @@ void GlobalObject::finishCreation(VM& vm) {
 ```

 Then, implement the function that creates the structure:

 ```c++
 void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
 {

@@ -301,11 +304,12 @@ If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead
 1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
 2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
 3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`

 void GlobalObject::finishCreation(VM& vm) {
     // ...
-    init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
-    });
+    this.m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
+        init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject\*>(init.owner)));
+    });

 ```

 Then, implement the function that creates the structure:

@@ -316,7 +320,7 @@ Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalO
 auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
 auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);

 // If there is no prototype or it only has

 auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
 init.setPrototype(prototype);

@@ -325,7 +329,6 @@ Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalO
 }
 ```

 Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`

 ```C++

@@ -378,12 +381,14 @@ extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalOb
 ```

 Zig:

 ```zig
 extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
 pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
 ```

-To create an object (instance) of a JS class defined in C++ from Zig, follow the __toJS convention like this:
+To create an object (instance) of a JS class defined in C++ from Zig, follow the \_\_toJS convention like this:

 ```c++
 // X509* is whatever we need to create the object
 extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)

@@ -395,12 +400,13 @@ extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509*
 ```

 And from Zig:

 ```zig
 const X509 = opaque {
     // ... class

     extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;

     pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
         return Bun__X509__toJS(globalObject, this);
     }
.cursor/rules/zig-javascriptcore-classes.mdc (new file, 498 lines)
@@ -0,0 +1,498 @@
---
description: How Zig works with JavaScriptCore bindings generator
globs:
alwaysApply: false
---

# Bun's JavaScriptCore Class Bindings Generator

This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).

## Architecture Overview

Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:

1. **Zig Implementation** (.zig files)
2. **JavaScript Interface Definition** (.classes.ts files)
3. **Generated Code** (C++/Zig files that connect everything)

## Class Definition Files

### JavaScript Interface (.classes.ts)

The `.classes.ts` files define the JavaScript API using a declarative approach:

```typescript
// Example: encoding.classes.ts
define({
  name: "TextDecoder",
  constructor: true,
  JSType: "object",
  finalize: true,
  proto: {
    decode: {
      // Function definition
      args: 1,
    },
    encoding: {
      // Getter with caching
      getter: true,
      cache: true,
    },
    fatal: {
      // Read-only property
      getter: true,
    },
    ignoreBOM: {
      // Read-only property
      getter: true,
    },
  },
});
```

Each class definition specifies:

- The class name
- Whether it has a constructor
- JavaScript type (object, function, etc.)
- Properties and methods in the `proto` field
- Caching strategy for properties
- Finalization requirements

### Zig Implementation (.zig)

The Zig files implement the native functionality:

```zig
// Example: TextDecoder.zig
pub const TextDecoder = struct {
    // Expose generated bindings as `js` namespace with trait conversion methods
    pub const js = JSC.Codegen.JSTextDecoder;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;
    pub const fromJSDirect = js.fromJSDirect;

    // Internal state
    encoding: []const u8,
    fatal: bool,
    ignoreBOM: bool,

    // Constructor implementation - note use of globalObject
    pub fn constructor(
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!*TextDecoder {
        // Implementation

        return bun.new(TextDecoder, .{
            // Fields
        });
    }

    // Prototype methods - note return type includes JSError
    pub fn decode(
        this: *TextDecoder,
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!JSC.JSValue {
        // Implementation
    }

    // Getters
    pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    }

    pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.jsBoolean(this.fatal);
    }

    // Cleanup - note standard pattern of using deinit/deref
    fn deinit(this: *TextDecoder) void {
        // Release any retained resources
        // Free the pointer at the end.
        bun.destroy(this);
    }

    // Finalize - called by the JS garbage collector. This should call deinit, or deref if reference counted.
    pub fn finalize(this: *TextDecoder) void {
        this.deinit();
    }
};
```

Key components in the Zig file:

- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
- Consistent use of `globalObject` parameter name instead of `ctx`
- Methods matching the JavaScript interface
- Getters/setters for properties
- Proper resource cleanup pattern with `deinit()` and `finalize()`

## Code Generation System

The binding generator produces C++ code that connects JavaScript and Zig:

1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
3. **Method Binding**: Connects JS function calls to Zig implementations
4. **Type Conversion**: Converts between JS values and Zig types
5. **Property Caching**: Implements the caching system for properties

The generated C++ code includes:

- A JSC wrapper class (`JSTextDecoder`)
- A prototype class (`JSTextDecoderPrototype`)
- A constructor function (`JSTextDecoderConstructor`)
- Function bindings (`TextDecoderPrototype__decodeCallback`)
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)

## CallFrame Access

The `CallFrame` object provides access to the JavaScript execution context:

```zig
pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // Get arguments
    const input = callFrame.argument(0);
    const options = callFrame.argument(1);

    // Get this value
    const thisValue = callFrame.thisValue();

    // Implementation with error handling
    if (input.isUndefinedOrNull()) {
        return globalObject.throw("Input cannot be null or undefined", .{});
    }

    // Return value or throw error
    return JSC.JSValue.jsString(globalObject, "result");
}
```

CallFrame methods include:

- `argument(i)`: Get the i-th argument
- `argumentCount()`: Get the number of arguments
- `thisValue()`: Get the `this` value
- `callee()`: Get the function being called
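As a hedged sketch of how these fit together (assuming the `JSC`, `bun`, `JSGlobalObject`, and `TextDecoder` definitions used in the snippets above; `repeat` is a hypothetical method, not part of Bun's actual API):

```zig
// Hypothetical method showing CallFrame argument handling.
pub fn repeat(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // Validate the argument list before reading individual arguments.
    if (callFrame.argumentCount() < 1) {
        return globalObject.throw("Expected at least 1 argument", .{});
    }
    // `argument(i)` is typically safe for out-of-range indexes (it yields
    // `undefined`), so optional arguments can be checked directly.
    const options = callFrame.argument(1);
    if (options.isUndefinedOrNull()) {
        // Fall back to defaults; `this` still points at the Zig struct.
        return JSC.JSValue.jsBoolean(this.fatal);
    }
    return JSC.JSValue.jsBoolean(true);
}
```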
## Property Caching and GC-Owned Values

The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:

```typescript
encoding: {
  getter: true,
  cache: true, // Enable caching
}
```

### C++ Implementation

In the generated C++ code, caching uses JSC's WriteBarrier:

```cpp
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
    auto& vm = JSC::getVM(lexicalGlobalObject);
    Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
    auto throwScope = DECLARE_THROW_SCOPE(vm);
    JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
    JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);

    // Check for cached value and return if present
    if (JSValue cachedValue = thisObject->m_encoding.get())
        return JSValue::encode(cachedValue);

    // Get value from Zig implementation
    JSC::JSValue result = JSC::JSValue::decode(
        TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
    );
    RETURN_IF_EXCEPTION(throwScope, {});

    // Store in cache for future access
    thisObject->m_encoding.set(vm, thisObject, result);
    RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
}
```

### Zig Accessor Functions

For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:

```zig
// External function declarations
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;

/// `TextDecoder.encoding` setter
/// This value will be visited by the garbage collector.
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
    JSC.markBinding(@src());
    TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}

/// `TextDecoder.encoding` getter
/// This value will be visited by the garbage collector.
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
    JSC.markBinding(@src());
    const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
    if (result == .zero)
        return null;

    return result;
}
```
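As a sketch of how Zig code might consult or seed this cache (the accessor names follow the generated functions above; the extra `thisValue` parameter is an assumption here, since the generated wiring may pass it differently):

```zig
pub fn getEncoding(this: *TextDecoder, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
    // Fast path: return the GC-owned cached value when it already exists.
    if (encodingGetCached(thisValue)) |cached| {
        return cached;
    }
    // Slow path: build the JS string once, then store it through the
    // WriteBarrier so the GC keeps it alive and visits it on collection.
    const value = JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    encodingSetCached(thisValue, globalObject, value);
    return value;
}
```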
### Benefits of GC-Owned Values

This system provides several key benefits:

1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization

### Use Cases

GC-owned cached values are particularly useful for:

1. **Computed Properties**: Store expensive computation results
2. **Lazily Created Objects**: Create objects only when needed, then cache them
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
4. **Memoization**: Cache results based on input parameters

The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.

## Memory Management and Finalization

The binding system handles memory management across the JavaScript/Zig boundary:

1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources

Bun uses a consistent pattern for resource cleanup:

```zig
// Resource cleanup method - separate from finalization
pub fn deinit(this: *TextDecoder) void {
    // Release resources like strings
    this._encoding.deref(); // String deref pattern

    // Free any buffers
    if (this.buffer) |buffer| {
        bun.default_allocator.free(buffer);
    }
}

// Called by the GC when object is collected
pub fn finalize(this: *TextDecoder) void {
    JSC.markBinding(@src()); // For debugging
    this.deinit(); // Clean up resources
    bun.default_allocator.destroy(this); // Free the object itself
}
```

Some objects that hold references to other JS objects use `.deref()` instead:

```zig
pub fn finalize(this: *SocketAddress) void {
    JSC.markBinding(@src());
    this._presentation.deref(); // Release references
    this.destroy();
}
```

## Error Handling with JSError

Bun uses the `bun.JSError!JSValue` return type for proper error handling:

```zig
pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // Throwing an error
    if (callFrame.argumentCount() < 1) {
        return globalObject.throw("Missing required argument", .{});
    }

    // Or returning a success value
    return JSC.JSValue.jsString(globalObject, "Success!");
}
```

This pattern allows Zig functions to:

1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack
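A minimal sketch of point 3, assuming a hypothetical fallible helper `decodeInner` that shares the `bun.JSError` error set: Zig's `try` re-raises the pending JavaScript exception to the caller with no explicit re-throw.

```zig
// Both functions are assumed to be declared inside the TextDecoder struct.
fn decodeInner(this: *TextDecoder, globalObject: *JSGlobalObject, input: JSC.JSValue) bun.JSError!JSC.JSValue {
    if (input.isUndefinedOrNull()) {
        // Throwing sets the JS exception and returns the Zig error.
        return globalObject.throw("Input cannot be null or undefined", .{});
    }
    return JSC.JSValue.jsString(globalObject, "decoded");
}

pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // `try` propagates both the Zig error and the pending JS exception upward.
    return try this.decodeInner(globalObject, callFrame.argument(0));
}
```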
## Type Safety and Error Handling

The binding system includes robust error handling:

```cpp
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
    scope.throwException(lexicalGlobalObject,
        Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
    return {};
}
```

## Prototypal Inheritance

The binding system creates proper JavaScript prototype chains:

1. **Constructor**: JSTextDecoderConstructor with standard .prototype property
2. **Prototype**: JSTextDecoderPrototype with methods and properties
3. **Instances**: Each JSTextDecoder instance with __proto__ pointing to prototype

This ensures JavaScript inheritance works as expected:

```cpp
// From generated code
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
    Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);

    // Set up the prototype chain
    putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
    ASSERT(inherits(info()));
}
```

## Performance Considerations

The binding system is optimized for performance:

1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions

## Creating a New Class Binding

To create a new class binding in Bun:

1. **Define the class interface** in a `.classes.ts` file:

```typescript
define({
  name: "MyClass",
  constructor: true,
  finalize: true,
  proto: {
    myMethod: {
      args: 1,
    },
    myProperty: {
      getter: true,
      cache: true,
    },
  },
});
```

2. **Implement the native functionality** in a `.zig` file:

```zig
pub const MyClass = struct {
    // Generated bindings
    pub const js = JSC.Codegen.JSMyClass;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;
    pub const fromJSDirect = js.fromJSDirect;

    // State
    value: []const u8,

    pub const new = bun.TrivialNew(@This());

    // Constructor
    pub fn constructor(
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!*MyClass {
        const arg = callFrame.argument(0);
        // Implementation
    }

    // Method
    pub fn myMethod(
        this: *MyClass,
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!JSC.JSValue {
        // Implementation
    }

    // Getter
    pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.jsString(globalObject, this.value);
    }

    // Resource cleanup
    pub fn deinit(this: *MyClass) void {
        // Clean up resources
    }

    pub fn finalize(this: *MyClass) void {
        this.deinit();
        bun.destroy(this);
    }
};
```

3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:
   - C++ class definitions
   - Method and property bindings
   - Memory management utilities
   - GC integration code

## Generated Code Structure

The binding generator produces several components:

### 1. C++ Classes

For each Zig class, the system generates:

- **JS<Class>**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
- **JS<Class>Prototype**: Contains methods and properties (`JSTextDecoderPrototype`)
- **JS<Class>Constructor**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)

### 2. C++ Methods and Properties

- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
- **Initialization Functions**: `finishCreation` methods for setting up the class

### 3. Zig Bindings

- **External Function Declarations**:

  ```zig
  extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
  ```

- **Cached Value Accessors**:

  ```zig
  pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
  pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
  ```

- **Constructor Helpers**:

  ```zig
  pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
  ```

### 4. GC Integration

- **Memory Cost Calculation**: `estimatedSize` method
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues

This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.
.git-blame-ignore-revs (new file, 8 lines)
@@ -0,0 +1,8 @@
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
# avoid mucking up blames.
#
# To configure git to use this, run:
#
#   git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
.github/pull_request_template.md (2 changed lines)
@@ -28,7 +28,7 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af

 - [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
 - [ ] I included a test for the new code, or an existing test covers it
-- [ ] JSValue used outside outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
+- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
 - [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
 -->
.github/workflows/docs.yml (1 changed line)
@@ -4,6 +4,7 @@ on:
   push:
     paths:
       - "docs/**"
+      - "packages/bun-types/**.d.ts"
       - "CONTRIBUTING.md"
     branches:
       - main
.github/workflows/lint.yml (5 changed lines)
@@ -5,8 +5,7 @@ on:
   workflow_dispatch:

 env:
-  BUN_VERSION: "1.2.0"
-  OXLINT_VERSION: "0.15.0"
+  BUN_VERSION: "1.2.10"

 jobs:
   lint-js:
@@ -19,4 +18,4 @@ jobs:
         with:
           bun-version: ${{ env.BUN_VERSION }}
       - name: Lint
-        run: bunx oxlint --config oxlint.json --quiet --format github
+        run: bun lint
.github/workflows/update-cares.yml (9 changed lines)
@@ -50,11 +50,16 @@ jobs:
             exit 1
           fi

-          LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
+          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
             echo "Error: Could not fetch SHA for tag $LATEST_TAG"
             exit 1
           fi
+          LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+            exit 1
+          fi

           if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
             echo "Error: Invalid SHA format received from GitHub"
.github/workflows/update-libarchive.yml (9 changed lines)
@@ -50,11 +50,16 @@ jobs:
             exit 1
           fi

-          LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
+          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
             echo "Error: Could not fetch SHA for tag $LATEST_TAG"
             exit 1
           fi
+          LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+            exit 1
+          fi

           if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
             echo "Error: Invalid SHA format received from GitHub"
.github/workflows/update-libdeflate.yml (9 changed lines)
@@ -50,11 +50,16 @@ jobs:
             exit 1
           fi

-          LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
+          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
             echo "Error: Could not fetch SHA for tag $LATEST_TAG"
             exit 1
           fi
+          LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+            exit 1
+          fi

           if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
             echo "Error: Invalid SHA format received from GitHub"
.github/workflows/update-lolhtml.yml (9 changed lines)
@@ -50,11 +50,16 @@ jobs:
             exit 1
           fi

-          LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
+          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
             echo "Error: Could not fetch SHA for tag $LATEST_TAG"
             exit 1
           fi
+          LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+            exit 1
+          fi

           if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
             echo "Error: Invalid SHA format received from GitHub"
.github/workflows/update-lshpack.yml (9 changed lines)
@@ -50,11 +50,16 @@ jobs:
             exit 1
           fi

-          LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
+          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
             echo "Error: Could not fetch SHA for tag $LATEST_TAG"
             exit 1
           fi
+          LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
+            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+            exit 1
+          fi

           if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
             echo "Error: Invalid SHA format received from GitHub"
.gitignore (1 changed line)
@@ -153,6 +153,7 @@ test/cli/install/registry/packages/publish-pkg-*
 test/cli/install/registry/packages/@secret/publish-pkg-8
 test/js/third_party/prisma/prisma/sqlite/dev.db-journal
 tmp
+codegen-for-zig-team.tar.gz

 # Dependencies
 /vendor
.vscode/launch.json (6 changed lines, generated)
@@ -1118,7 +1118,11 @@
       "request": "attach",
       "name": "rr",
       "trace": "Off",
-      "setupCommands": ["handle SIGPWR nostop noprint pass"],
+      "setupCommands": [
+        "handle SIGPWR nostop noprint pass",
+        "source ${workspaceFolder}/misctools/gdb/std_gdb_pretty_printers.py",
+        "source ${workspaceFolder}/misctools/gdb/zig_gdb_pretty_printers.py",
+      ],
     },
   ],
   "inputs": [
.vscode/settings.json (4 changed lines)
@@ -30,7 +30,7 @@
   "zig.initialSetupDone": true,
   "zig.buildOption": "build",
   "zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
-  "zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"],
+  "zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
   "zig.zls.buildOnSaveStep": "check",
   // "zig.zls.enableBuildOnSave": true,
   // "zig.buildOnSave": true,
@@ -146,6 +146,8 @@
     "*.mdc": "markdown",
     "array": "cpp",
     "ios": "cpp",
+    "oxlint.json": "jsonc",
+    "bun.lock": "jsonc",
   },
   "C_Cpp.files.exclude": {
     "**/.vscode": true,
@@ -53,39 +53,39 @@ $ brew install bun

 ## Install LLVM

-Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
+Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:

 {% codetabs group="os" %}

 ```bash#macOS (Homebrew)
-$ brew install llvm@18
+$ brew install llvm@19
 ```

 ```bash#Ubuntu/Debian
 $ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
-$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
+$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 19 all
 ```

 ```bash#Arch
-$ sudo pacman -S llvm clang18 lld
+$ sudo pacman -S llvm clang lld
 ```

 ```bash#Fedora
-$ sudo dnf install llvm18 clang18 lld18-devel
+$ sudo dnf install llvm clang lld-devel
 ```

 ```bash#openSUSE Tumbleweed
-$ sudo zypper install clang18 lld18 llvm18
+$ sudo zypper install clang19 lld19 llvm19
 ```

 {% /codetabs %}

 If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).

-Make sure Clang/LLVM 18 is in your path:
+Make sure Clang/LLVM 19 is in your path:

 ```bash
-$ which clang-18
+$ which clang-19
 ```

 If not, run this to manually add it:

@@ -94,13 +94,13 @@ If not, run this to manually add it:

 ```bash#macOS (Homebrew)
 # use fish_add_path if you're using fish
-# use path+="$(brew --prefix llvm@18)/bin" if you are using zsh
-$ export PATH="$(brew --prefix llvm@18)/bin:$PATH"
+# use path+="$(brew --prefix llvm@19)/bin" if you are using zsh
+$ export PATH="$(brew --prefix llvm@19)/bin:$PATH"
 ```

 ```bash#Arch
 # use fish_add_path if you're using fish
-$ export PATH="$PATH:/usr/lib/llvm18/bin"
+$ export PATH="$PATH:/usr/lib/llvm19/bin"
 ```

 {% /codetabs %}

@@ -134,6 +134,16 @@ We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-debug`
 $ bun-debug
 ```

+## Running debug builds
+
+The `bd` package.json script compiles and runs a debug build of Bun, only printing the output of the build process if it fails.
+
+```sh
+$ bun bd <args>
+$ bun bd test foo.test.ts
+$ bun bd ./foo.ts
+```
+
 ## Code generation scripts

 Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.

@@ -212,6 +222,9 @@ $ git -C vendor/WebKit checkout <commit_hash>
 # Optionally, you can use `make jsc` for a release build
 $ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

+# After an initial run of `make jsc-debug`, you can rebuild JSC with:
+$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
+
 # Build bun with the local JSC build
 $ bun run build:local
 ```

@@ -250,7 +263,7 @@ The issue may manifest when initially running `bun setup` as Clang being unable

 ```
 The C++ compiler

-  "/usr/bin/clang++-18"
+  "/usr/bin/clang++-19"

 is not able to compile a simple test program.
 ```
Makefile (6 changed lines)
@@ -1183,6 +1183,8 @@ jsc-copy-headers:
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
+	cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/GlobalCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GlobalCodeBlock.h
+	cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ProgramCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ProgramCodeBlock.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
 	find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
@@ -1234,6 +1236,8 @@ jsc-copy-headers-debug:
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
+	cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/GlobalCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GlobalCodeBlock.h
+	cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ProgramCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ProgramCodeBlock.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
 	cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
 	find $(WEBKIT_DEBUG_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
@@ -1392,7 +1396,7 @@ jsc-build-linux-compile-build-debug:
 	cmake --build $(WEBKIT_DEBUG_DIR) --config Debug --target jsc


-jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile jsc-build-copy
+jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile
 jsc-build-mac-debug: jsc-force-fastjit jsc-build-mac-compile-debug

 jsc-build-linux: jsc-build-linux-compile-config jsc-build-linux-compile-build jsc-build-copy
bench/crypto/aes-gcm-throughput.mjs (new file, 44 lines)
@@ -0,0 +1,44 @@
import { bench, run } from "../runner.mjs";
import crypto from "node:crypto";
import { Buffer } from "node:buffer";

const keylen = { "aes-128-gcm": 16, "aes-192-gcm": 24, "aes-256-gcm": 32 };
const sizes = [4 * 1024, 1024 * 1024];
const ciphers = ["aes-128-gcm", "aes-192-gcm", "aes-256-gcm"];

const messages = {};
sizes.forEach(size => {
  messages[size] = Buffer.alloc(size, "b");
});

const keys = {};
ciphers.forEach(cipher => {
  keys[cipher] = crypto.randomBytes(keylen[cipher]);
});

// Fixed IV and AAD
const iv = crypto.randomBytes(12);
const associate_data = Buffer.alloc(16, "z");

for (const cipher of ciphers) {
  for (const size of sizes) {
    const message = messages[size];
    const key = keys[cipher];

    bench(`${cipher} ${size / 1024}KB`, () => {
      const alice = crypto.createCipheriv(cipher, key, iv);
      alice.setAAD(associate_data);
      const enc = alice.update(message);
      alice.final();
      const tag = alice.getAuthTag();

      const bob = crypto.createDecipheriv(cipher, key, iv);
      bob.setAuthTag(tag);
      bob.setAAD(associate_data);
      bob.update(enc);
      bob.final();
    });
  }
}

await run();
@@ -1,7 +1,7 @@
 {
   "compilerOptions": {
     // Enable latest features
-    "lib": ["ESNext", "DOM"],
+    "lib": ["ESNext"],
     "target": "ESNext",
     "module": "ESNext",
     "moduleDetection": "force",

@@ -1,7 +1,7 @@
 {
   "compilerOptions": {
     // Enable latest features
-    "lib": ["ESNext", "DOM"],
+    "lib": ["ESNext"],
     "target": "ESNext",
     "module": "ESNext",
     "moduleDetection": "force",
bench/snippets/redis-simple.mjs (new file, 28 lines)
@@ -0,0 +1,28 @@
import ioredis from "ioredis";

const redis = process.argv.includes("--redis=native")
  ? Bun.redis
  : new ioredis("redis://localhost:6379", {
      enableAutoPipelining: true,
    });

const isBun = globalThis.Bun && redis === Bun.redis;
for (let count of [100, 1000]) {
  function iterate() {
    const promises = new Array(count);
    for (let i = 0; i < count; i++) {
      promises[i] = redis.get("greeting");
    }

    return Promise.all(promises);
  }

  const label = isBun ? `Bun.redis` : `ioredis`;
  console.time(`GET 'greeting' batches of ${count} - ${label} (${count} iterations)`);
  for (let i = 0; i < 1000; i++) {
    await iterate();
  }
  console.timeEnd(`GET 'greeting' batches of ${count} - ${label} (${count} iterations)`);
}

process.exit(0);
bench/snippets/request-method-getter.mjs (new file, 14 lines)
@@ -0,0 +1,14 @@
import { bench, run } from "../runner.mjs";

const url = "http://localhost:3000/";
const clonable = new Request(url);

bench("request.clone().method", () => {
  return clonable.clone().method;
});

bench("new Request(url).method", () => {
  return new Request(url).method;
});

await run();
build.zig (314 changed lines)
@@ -4,7 +4,7 @@ const builtin = @import("builtin");
 const Build = std.Build;
 const Step = Build.Step;
 const Compile = Step.Compile;
-const LazyPath = Step.LazyPath;
+const LazyPath = Build.LazyPath;
 const Target = std.Target;
 const ResolvedTarget = std.Build.ResolvedTarget;
 const CrossTarget = std.zig.CrossTarget;
@@ -18,21 +18,21 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;

 const pathRel = fs.path.relative;

 /// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
 /// When updating this, make sure to adjust SetupZig.cmake
 const recommended_zig_version = "0.14.0";

-comptime {
-    if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
-        @compileError(
-            "" ++
-                "Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
-                builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
-                "CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
-                "upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
-                "comment this error out.",
-        );
-    }
-}
+// comptime {
+//     if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
+//         @compileError(
+//             "" ++
+//                 "Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
+//                 builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
+//                 "CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
+//                 "upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
+//                 "comment this error out.",
+//         );
+//     }
+// }

 const zero_sha = "0000000000000000000000000000000000000000";

@@ -93,6 +93,7 @@ const BunBuildOptions = struct {
     opts.addOption(bool, "baseline", this.isBaseline());
     opts.addOption(bool, "enable_logs", this.enable_logs);
     opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
+    opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);

     const mod = opts.createModule();
     this.cached_options_module = mod;

@@ -153,13 +154,6 @@ pub fn build(b: *Build) !void {
     std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
     checked_file_exists = std.AutoHashMap(u64, void).init(b.allocator);

-    // TODO: Upgrade path for 0.14.0
-    // b.graph.zig_lib_directory = brk: {
-    //     const sub_path = "vendor/zig/lib";
-    //     const dir = try b.build_root.handle.openDir(sub_path, .{});
-    //     break :brk .{ .handle = dir, .path = try b.build_root.join(b.graph.arena, &.{sub_path}) };
-    // };
-
     var target_query = b.standardTargetOptionsQueryOnly(.{});
     const optimize = b.standardOptimizeOption(.{});

@@ -205,10 +199,8 @@ pub fn build(b: *Build) !void {

     const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0";

-    b.reference_trace = ref_trace: {
-        const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 24;
-        break :ref_trace if (trace == 0) null else trace;
-    };
+    // Lower the default reference trace for incremental
+    b.reference_trace = b.reference_trace orelse if (b.graph.incremental == true) 8 else 16;

     const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;

@@ -285,6 +277,40 @@ pub fn build(b: *Build) !void {
         step.dependOn(addInstallObjectFile(b, bun_obj, "bun-zig", obj_format));
     }

+    // zig build test
+    {
+        var step = b.step("test", "Build Bun's unit test suite");
+        var o = build_options;
+        var unit_tests = b.addTest(.{
+            .name = "bun-test",
+            .optimize = build_options.optimize,
+            .root_source_file = b.path("src/unit_test.zig"),
+            .test_runner = .{ .path = b.path("src/main_test.zig"), .mode = .simple },
+            .target = build_options.target,
+            .use_llvm = !build_options.no_llvm,
+            .use_lld = if (build_options.os == .mac) false else !build_options.no_llvm,
+            .omit_frame_pointer = false,
+            .strip = false,
+        });
+        configureObj(b, &o, unit_tests);
+        // Setting `linker_allow_shlib_undefined` causes the linker to ignore
+        // all undefined symbols. We want this because all we care about is the
+        // object file Zig creates; we perform our own linking later. There is
+        // currently no way to make a test build that only creates an object
+        // file w/o creating an executable.
+        //
+        // See: https://github.com/ziglang/zig/issues/23374
+        unit_tests.linker_allow_shlib_undefined = true;
+        unit_tests.link_function_sections = true;
+        unit_tests.link_data_sections = true;
+        unit_tests.bundle_ubsan_rt = false;
+
+        const bin = unit_tests.getEmittedBin();
+        const obj = bin.dirname().path(b, "bun-test.o");
+        const cpy_obj = b.addInstallFile(obj, "bun-test.o");
+        step.dependOn(&cpy_obj.step);
+    }
+
     // zig build windows-shim
     {
         var step = b.step("windows-shim", "Build the Windows shim (bun_shim_impl.exe + bun_shim_debug.exe)");

@@ -308,6 +334,22 @@ pub fn build(b: *Build) !void {
         b.default_step.dependOn(step);
     }

+    // zig build watch
+    // const enable_watch_step = b.option(bool, "watch_step", "Enable the watch step. This reads more files so it is off by default") orelse false;
+    // if (no_llvm or enable_watch_step) {
+    //     self_hosted_watch.selfHostedExeBuild(b, &build_options) catch @panic("OOM");
+    // }
+
+    // zig build check-debug
+    {
+        const step = b.step("check-debug", "Check for semantic analysis errors on some platforms");
+        addMultiCheck(b, step, build_options, &.{
+            .{ .os = .windows, .arch = .x86_64 },
+            .{ .os = .mac, .arch = .aarch64 },
+            .{ .os = .linux, .arch = .x86_64 },
+        }, &.{.Debug});
+    }
+
     // zig build check-all
     {
         const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");

@@ -361,7 +403,22 @@ pub fn build(b: *Build) !void {
     // zig build translate-c-headers
     {
         const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
-        step.dependOn(&b.addInstallFile(getTranslateC(b, b.graph.host, .Debug).getOutput(), "translated-c-headers.zig").step);
+        for ([_]TargetDescription{
+            .{ .os = .windows, .arch = .x86_64 },
+            .{ .os = .mac, .arch = .x86_64 },
+            .{ .os = .mac, .arch = .aarch64 },
+            .{ .os = .linux, .arch = .x86_64 },
+            .{ .os = .linux, .arch = .aarch64 },
+            .{ .os = .linux, .arch = .x86_64, .musl = true },
+            .{ .os = .linux, .arch = .aarch64, .musl = true },
+        }) |t| {
+            const resolved = t.resolveTarget(b);
+            step.dependOn(
+                &b.addInstallFile(getTranslateC(b, resolved, .Debug), b.fmt("translated-c-headers/{s}.zig", .{
+                    resolved.result.zigTriple(b.allocator) catch @panic("OOM"),
+                })).step,
+            );
+        }
     }

     // zig build enum-extractor

@@ -378,23 +435,32 @@ pub fn build(b: *Build) !void {
     }
 }

-pub fn addMultiCheck(
+const TargetDescription = struct {
+    os: OperatingSystem,
+    arch: Arch,
+    musl: bool = false,
+
+    fn resolveTarget(desc: TargetDescription, b: *Build) std.Build.ResolvedTarget {
+        return b.resolveTargetQuery(.{
+            .os_tag = OperatingSystem.stdOSTag(desc.os),
+            .cpu_arch = desc.arch,
+            .cpu_model = getCpuModel(desc.os, desc.arch) orelse .determined_by_arch_os,
+            .os_version_min = getOSVersionMin(desc.os),
+            .glibc_version = if (desc.musl) null else getOSGlibCVersion(desc.os),
+        });
+    }
+};
+
+fn addMultiCheck(
     b: *Build,
     parent_step: *Step,
     root_build_options: BunBuildOptions,
-    to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false },
+    to_check: []const TargetDescription,
     optimize: []const std.builtin.OptimizeMode,
 ) void {
||||
for (to_check) |check| {
|
||||
for (optimize) |mode| {
|
||||
const check_target = b.resolveTargetQuery(.{
|
||||
.os_tag = OperatingSystem.stdOSTag(check.os),
|
||||
.cpu_arch = check.arch,
|
||||
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_arch_os,
|
||||
.os_version_min = getOSVersionMin(check.os),
|
||||
.glibc_version = if (check.musl) null else getOSGlibCVersion(check.os),
|
||||
});
|
||||
|
||||
const check_target = check.resolveTarget(b);
|
||||
var options: BunBuildOptions = .{
|
||||
.target = check_target,
|
||||
.os = check.os,
|
||||
@@ -418,7 +484,13 @@ pub fn addMultiCheck(
|
||||
}
|
||||
}
|
||||
|
||||
fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Step.TranslateC {
|
||||
fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) LazyPath {
|
||||
const target = b.resolveTargetQuery(q: {
|
||||
var query = initial_target.query;
|
||||
if (query.os_tag == .windows)
|
||||
query.abi = .gnu;
|
||||
break :q query;
|
||||
});
|
||||
const translate_c = b.addTranslateC(.{
|
||||
.root_source_file = b.path("src/c-headers-for-zig.h"),
|
||||
.target = target,
|
||||
@@ -434,28 +506,72 @@ fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.buil
|
||||
const str, const value = entry;
|
||||
translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
|
||||
}
|
||||
return translate_c;
|
||||
|
||||
if (target.result.os.tag == .windows) {
|
||||
// translate-c is unable to translate the unsuffixed windows functions
|
||||
// like `SetCurrentDirectory` since they are defined with an odd macro
|
||||
// that translate-c doesn't handle.
|
||||
//
|
||||
// #define SetCurrentDirectory __MINGW_NAME_AW(SetCurrentDirectory)
|
||||
//
|
||||
// In these cases, it's better to just reference the underlying function
|
||||
// directly: SetCurrentDirectoryW. To make the error better, a post
|
||||
// processing step is applied to the translate-c file.
|
||||
//
|
||||
// Additionally, this step makes it so that decls like NTSTATUS and
|
||||
// HANDLE point to the standard library structures.
|
||||
const helper_exe = b.addExecutable(.{
|
||||
.name = "process_windows_translate_c",
|
||||
.root_module = b.createModule(.{
|
||||
.root_source_file = b.path("src/codegen/process_windows_translate_c.zig"),
|
||||
.target = b.graph.host,
|
||||
.optimize = .Debug,
|
||||
}),
|
||||
});
|
||||
const in = translate_c.getOutput();
|
||||
const run = b.addRunArtifact(helper_exe);
|
||||
run.addFileArg(in);
|
||||
const out = run.addOutputFileArg("c-headers-for-zig.zig");
|
||||
return out;
|
||||
}
|
||||
return translate_c.getOutput();
|
||||
}
|
||||
|
||||
pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
const obj = b.addObject(.{
|
||||
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
|
||||
.root_source_file = switch (opts.os) {
|
||||
.wasm => b.path("root_wasm.zig"),
|
||||
else => b.path("src/main.zig"),
|
||||
// else => b.path("root_css.zig"),
|
||||
},
|
||||
// Create `@import("bun")`, containing most of Bun's code.
|
||||
const bun = b.createModule(.{
|
||||
.root_source_file = b.path("src/bun.zig"),
|
||||
});
|
||||
bun.addImport("bun", bun); // allow circular "bun" import
|
||||
addInternalImports(b, bun, opts);
|
||||
|
||||
const root = b.createModule(.{
|
||||
.root_source_file = b.path("src/main.zig"),
|
||||
|
||||
// Root module gets compilation flags. Forwarded as default to dependencies.
|
||||
.target = opts.target,
|
||||
.optimize = opts.optimize,
|
||||
.use_llvm = !opts.no_llvm,
|
||||
.use_lld = if (opts.os == .mac) false else !opts.no_llvm,
|
||||
|
||||
// https://github.com/ziglang/zig/issues/17430
|
||||
.pic = true,
|
||||
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false, // stripped at the end
|
||||
});
|
||||
root.addImport("bun", bun);
|
||||
|
||||
const obj = b.addObject(.{
|
||||
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
|
||||
.root_module = root,
|
||||
});
|
||||
configureObj(b, opts, obj);
|
||||
return obj;
|
||||
}
|
||||
|
||||
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
// Flags on root module get used for the compilation
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
obj.root_module.strip = false; // stripped at the end
|
||||
// https://github.com/ziglang/zig/issues/17430
|
||||
obj.root_module.pic = true;
|
||||
|
||||
// Object options
|
||||
obj.use_llvm = !opts.no_llvm;
|
||||
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
|
||||
if (opts.enable_asan) {
|
||||
if (@hasField(Build.Module, "sanitize_address")) {
|
||||
obj.root_module.sanitize_address = true;
|
||||
@@ -465,7 +581,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
}
|
||||
}
|
||||
obj.bundle_compiler_rt = false;
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
obj.bundle_ubsan_rt = false;
|
||||
|
||||
// Link libc
|
||||
if (opts.os != .wasm) {
|
||||
@@ -475,6 +591,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
|
||||
// Disable stack probing on x86 so we don't need to include compiler_rt
|
||||
if (opts.arch.isX86()) {
|
||||
// TODO: enable on debug please.
|
||||
obj.root_module.stack_check = false;
|
||||
obj.root_module.stack_protector = false;
|
||||
}
|
||||
@@ -489,17 +606,18 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
obj.root_module.valgrind = true;
|
||||
}
|
||||
}
|
||||
addInternalPackages(b, obj, opts);
|
||||
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));
|
||||
|
||||
const translate_c = getTranslateC(b, opts.target, opts.optimize);
|
||||
obj.root_module.addImport("translated-c-headers", translate_c.createModule());
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
const ObjectFormat = enum {
    /// Emitting LLVM bc files could allow a stronger LTO pass, however it
    /// doesn't yet work. It is left accessible with `-Dobj_format=bc` or in
    /// CMake with `-DZIG_OBJECT_FORMAT=bc`.
    ///
    /// To use LLVM bitcode from Zig, more work needs to be done. Currently, an
    /// install of LLVM 18.1.7 is not compatible with the bitcode that Zig 0.13
    /// (which itself uses LLVM 18.1.7) outputs. Change to "bc" to experiment;
    /// an "Invalid record" error means the output is not valid.
    bc,
    /// Emit a .o / .obj file for the bun-zig object.
    obj,
};

@@ -529,16 +647,21 @@ fn exists(path: []const u8) bool {
    return true;
}

fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
    const os = opts.os;

    mod.addImport("build_options", opts.buildOptionsModule(b));

    const translate_c = getTranslateC(b, opts.target, opts.optimize);
    mod.addImport("translated-c-headers", b.createModule(.{ .root_source_file = translate_c }));

    const zlib_internal_path = switch (os) {
        .windows => "src/deps/zlib.win32.zig",
        .linux, .mac => "src/deps/zlib.posix.zig",
        else => null,
    };
    if (zlib_internal_path) |path| {
        obj.root_module.addAnonymousImport("zlib-internal", .{
        mod.addAnonymousImport("zlib-internal", .{
            .root_source_file = b.path(path),
        });
    }
@@ -548,7 +671,7 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
        .windows => "src/async/windows_event_loop.zig",
        else => "src/async/stub_event_loop.zig",
    };
    obj.root_module.addAnonymousImport("async", .{
    mod.addAnonymousImport("async", .{
        .root_source_file = b.path(async_path),
    });

@@ -596,7 +719,7 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
        entry.import
    else
        entry.file;
    obj.root_module.addAnonymousImport(import_path, .{
    mod.addAnonymousImport(import_path, .{
        .root_source_file = .{ .cwd_relative = path },
    });
}
@@ -606,16 +729,37 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
    .{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
    .{ .import = "completions-fish", .file = b.path("completions/bun.fish") },
}) |entry| {
    obj.root_module.addAnonymousImport(entry.import, .{
    mod.addAnonymousImport(entry.import, .{
        .root_source_file = entry.file,
    });
}

if (os == .windows) {
    obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{
    mod.addAnonymousImport("bun_shim_impl.exe", .{
        .root_source_file = opts.windowsShim(b).exe.getEmittedBin(),
    });
}

// Finally, make it so all modules share the same import table.
propagateImports(mod) catch @panic("OOM");
}

/// Makes all imports of `source_mod` visible to all of its dependencies.
/// Does not replace existing imports.
fn propagateImports(source_mod: *Module) !void {
    var seen = std.AutoHashMap(*Module, void).init(source_mod.owner.graph.arena);
    defer seen.deinit();
    var queue = std.ArrayList(*Module).init(source_mod.owner.graph.arena);
    defer queue.deinit();
    try queue.appendSlice(source_mod.import_table.values());
    while (queue.pop()) |mod| {
        if ((try seen.getOrPut(mod)).found_existing) continue;
        try queue.appendSlice(mod.import_table.values());

        for (source_mod.import_table.keys(), source_mod.import_table.values()) |k, v|
            if (mod.import_table.get(k) == null)
                mod.addImport(k, v);
    }
}

fn validateGeneratedPath(path: []const u8) void {
@@ -644,30 +788,34 @@ const WindowsShim = struct {

    const exe = b.addExecutable(.{
        .name = "bun_shim_impl",
        .root_source_file = path,
        .target = target,
        .optimize = .ReleaseFast,
        .root_module = b.createModule(.{
            .root_source_file = path,
            .target = target,
            .optimize = .ReleaseFast,
            .unwind_tables = .none,
            .omit_frame_pointer = true,
            .strip = true,
            .sanitize_thread = false,
            .single_threaded = true,
            .link_libc = false,
        }),
        .linkage = .static,
        .use_llvm = true,
        .use_lld = true,
        .unwind_tables = .none,
        .omit_frame_pointer = true,
        .strip = true,
        .linkage = .static,
        .sanitize_thread = false,
        .single_threaded = true,
        .link_libc = false,
    });

    const dbg = b.addExecutable(.{
        .name = "bun_shim_debug",
        .root_source_file = path,
        .target = target,
        .optimize = .Debug,
        .root_module = b.createModule(.{
            .root_source_file = path,
            .target = target,
            .optimize = .Debug,
            .single_threaded = true,
            .link_libc = false,
        }),
        .linkage = .static,
        .use_llvm = true,
        .use_lld = true,
        .linkage = .static,
        .single_threaded = true,
        .link_libc = false,
    });

    return .{ .exe = exe, .dbg = dbg };

bun.lock
@@ -29,7 +29,6 @@
    "name": "bun-types",
    "dependencies": {
      "@types/node": "*",
      "@types/ws": "~8.5.10",
    },
    "devDependencies": {
      "@biomejs/biome": "^1.5.3",
@@ -165,8 +164,6 @@

"@types/semver": ["@types/semver@7.5.8", "", {}, "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ=="],

"@types/ws": ["@types/ws@8.5.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w=="],

"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@7.16.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.16.1", "@typescript-eslint/type-utils": "7.16.1", "@typescript-eslint/utils": "7.16.1", "@typescript-eslint/visitor-keys": "7.16.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "@typescript-eslint/parser": "^7.0.0", "eslint": "^8.56.0" } }, "sha512-SxdPak/5bO0EnGktV05+Hq8oatjAYVY3Zh2bye9pGZy6+jwyR3LG3YKkV4YatlsgqXP28BTeVm9pqwJM96vf2A=="],

"@typescript-eslint/parser": ["@typescript-eslint/parser@7.16.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "7.16.1", "@typescript-eslint/types": "7.16.1", "@typescript-eslint/typescript-estree": "7.16.1", "@typescript-eslint/visitor-keys": "7.16.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-u+1Qx86jfGQ5i4JjK33/FnawZRpsLxRnKzGE6EABZ40KxVT/vWsiZFEBBHjFOljmmV3MBYOHEKi0Jm9hbAOClA=="],
@@ -915,8 +912,6 @@

"@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],

"@types/ws/@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],

"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],

"@typescript-eslint/visitor-keys/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
@@ -1007,8 +1002,6 @@

"@definitelytyped/utils/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="],

"@types/ws/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],

"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="],

"are-we-there-yet/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="],

@@ -142,6 +142,14 @@ if(UNIX)
    -fno-unwind-tables
    -fno-asynchronous-unwind-tables
  )

  # needed for the libuv stubs, because they use a C23 feature
  # that lets you define a parameter without a name
  register_compiler_flags(
    DESCRIPTION "Allow C23 extensions"
    -Wno-c23-extensions
  )
endif()

register_compiler_flags(

@@ -423,7 +423,7 @@ function(register_command)
    # libbun-profile.a is now over 5gb in size, compress it first
    list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
    list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
    list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -6 libbun-profile.a)
    list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -1 libbun-profile.a)
    list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
  else()
    list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
@@ -633,7 +633,7 @@ function(register_repository)
    set(GIT_PATH ${VENDOR_PATH}/${GIT_NAME})
  endif()

  set(GIT_EFFECTIVE_OUTPUTS)
  set(GIT_EFFECTIVE_OUTPUTS ${GIT_PATH}/.ref)
  foreach(output ${GIT_OUTPUTS})
    list(APPEND GIT_EFFECTIVE_OUTPUTS ${GIT_PATH}/${output})
  endforeach()
@@ -751,11 +751,17 @@ function(register_cmake_command)
    list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
  endif()

  set(MAKE_SOURCES)
  if(TARGET clone-${MAKE_TARGET})
    list(APPEND MAKE_SOURCES ${MAKE_CWD}/.ref)
  endif()

  register_command(
    COMMENT "Configuring ${MAKE_TARGET}"
    TARGET configure-${MAKE_TARGET}
    COMMAND ${CMAKE_COMMAND} ${MAKE_EFFECTIVE_ARGS}
    CWD ${MAKE_CWD}
    SOURCES ${MAKE_SOURCES}
    OUTPUTS ${MAKE_BUILD_PATH}/CMakeCache.txt
  )

@@ -807,6 +813,7 @@ function(register_cmake_command)
    TARGETS configure-${MAKE_TARGET}
    COMMAND ${CMAKE_COMMAND} ${MAKE_BUILD_ARGS}
    CWD ${MAKE_CWD}
    SOURCES ${MAKE_SOURCES}
    ARTIFACTS ${MAKE_ARTIFACTS}
  )


@@ -26,6 +26,15 @@ else()
  setx(DEBUG OFF)
endif()

optionx(BUN_TEST BOOL "Build Bun's unit test suite instead of the normal build" DEFAULT OFF)

if(BUN_TEST)
  setx(TEST ON)
else()
  setx(TEST OFF)
endif()


if(CMAKE_BUILD_TYPE MATCHES "MinSizeRel")
  setx(ENABLE_SMOL ON)
endif()
@@ -62,7 +71,14 @@ if(ARCH STREQUAL "x64")
  optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. disables AVX, AVX2)" DEFAULT OFF)
endif()

optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG})
# Disabling logs by default for tests yields faster builds
if(DEBUG AND NOT TEST)
  set(DEFAULT_ENABLE_LOGS ON)
else()
  set(DEFAULT_ENABLE_LOGS OFF)
endif()

optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEFAULT_ENABLE_LOGS})
optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG})

optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)

@@ -29,6 +29,9 @@ else()
endif()

set(ZIG_NAME bootstrap-${ZIG_ARCH}-${ZIG_OS_ABI})
if(ZIG_COMPILER_SAFE)
  set(ZIG_NAME ${ZIG_NAME}-ReleaseSafe)
endif()
set(ZIG_FILENAME ${ZIG_NAME}.zip)

if(CMAKE_HOST_WIN32)

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    oven-sh/boringssl
  COMMIT
    914b005ef3ece44159dca0ffad74eb42a9f6679f
    7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
)

register_cmake_command(

@@ -12,6 +12,10 @@ else()
  set(bunStrip bun)
endif()

if(TEST)
  set(bun ${bun}-test)
endif()

set(bunExe ${bun}${CMAKE_EXECUTABLE_SUFFIX})

if(bunStrip)
@@ -528,7 +532,6 @@ file(GLOB_RECURSE BUN_ZIG_SOURCES ${CONFIGURE_DEPENDS}

list(APPEND BUN_ZIG_SOURCES
  ${CWD}/build.zig
  ${CWD}/src/main.zig
  ${BUN_BINDGEN_ZIG_OUTPUTS}
)

@@ -550,7 +553,13 @@ else()
  list(APPEND BUN_ZIG_GENERATED_SOURCES ${BUN_BAKE_RUNTIME_OUTPUTS})
endif()

set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
if(TEST)
  set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-test.o)
  set(ZIG_STEPS test)
else()
  set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
  set(ZIG_STEPS obj)
endif()

if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
  if(APPLE)
@@ -579,10 +588,10 @@ register_command(
  GROUP
    console
  COMMENT
    "Building src/*.zig for ${ZIG_TARGET}"
    "Building src/*.zig into ${BUN_ZIG_OUTPUT} for ${ZIG_TARGET}"
  COMMAND
    ${ZIG_EXECUTABLE}
      build obj
      build ${ZIG_STEPS}
      ${CMAKE_ZIG_FLAGS}
      --prefix ${BUILD_PATH}
      -Dobj_format=${ZIG_OBJECT_FORMAT}
@@ -596,6 +605,7 @@ register_command(
      -Dcodegen_path=${CODEGEN_PATH}
      -Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
      --prominent-compile-errors
      --summary all
      ${ZIG_FLAGS_BUN}
  ARTIFACTS
    ${BUN_ZIG_OUTPUT}
@@ -622,6 +632,7 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
  ${CWD}/src/bun.js/bindings/sqlite/*.cpp
  ${CWD}/src/bun.js/bindings/webcrypto/*.cpp
  ${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp
  ${CWD}/src/bun.js/bindings/node/*.cpp
  ${CWD}/src/bun.js/bindings/node/crypto/*.cpp
  ${CWD}/src/bun.js/bindings/v8/*.cpp
  ${CWD}/src/bun.js/bindings/v8/shim/*.cpp
@@ -635,6 +646,8 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
  ${BUN_USOCKETS_SOURCE}/src/eventing/*.c
  ${BUN_USOCKETS_SOURCE}/src/internal/*.c
  ${BUN_USOCKETS_SOURCE}/src/crypto/*.c
  ${CWD}/src/bun.js/bindings/uv-posix-polyfills.c
  ${CWD}/src/bun.js/bindings/uv-posix-stubs.c
)

if(WIN32)
@@ -785,6 +798,10 @@ target_include_directories(${bun} PRIVATE
  ${NODEJS_HEADERS_PATH}/include
)

if(NOT WIN32)
  target_include_directories(${bun} PRIVATE ${CWD}/src/bun.js/bindings/libuv)
endif()

if(LINUX)
  include(CheckIncludeFiles)
  check_include_files("sys/queue.h" HAVE_SYS_QUEUE_H)
@@ -893,6 +910,7 @@ if(NOT WIN32)
    -Werror=sometimes-uninitialized
    -Werror=unused
    -Wno-unused-function
    -Wno-c++23-lambda-attributes
    -Wno-nullability-completeness
    -Werror
  )
@@ -909,6 +927,7 @@ if(NOT WIN32)
    -Werror=nonnull
    -Werror=move
    -Werror=sometimes-uninitialized
    -Wno-c++23-lambda-attributes
    -Wno-nullability-completeness
    -Werror
  )
@@ -995,6 +1014,10 @@ if(LINUX)
    -Wl,--compress-debug-sections=zlib
    -Wl,-z,lazy
    -Wl,-z,norelro
    # enable string tail merging
    -Wl,-O2
    # make debug info faster to load
    -Wl,--gdb-index
    -Wl,-z,combreloc
    -Wl,--no-eh-frame-hdr
    -Wl,--sort-section=name
@@ -1071,6 +1094,7 @@ set(BUN_DEPENDENCIES
  BoringSSL
  Brotli
  Cares
  Highway
  LibDeflate
  LolHtml
  Lshpack

cmake/targets/BuildHighway.cmake
@@ -0,0 +1,33 @@
register_repository(
  NAME
    highway
  REPOSITORY
    google/highway
  COMMIT
    12b325bc1793dee68ab2157995a690db859fe9e0
)

set(HIGHWAY_CMAKE_ARGS
  # Build a static library
  -DBUILD_SHARED_LIBS=OFF
  # Enable position-independent code for linking into the main executable
  -DCMAKE_POSITION_INDEPENDENT_CODE=ON
  # Disable unnecessary components
  -DHWY_ENABLE_TESTS=OFF
  -DHWY_ENABLE_EXAMPLES=OFF
  -DHWY_ENABLE_CONTRIB=OFF
  # Disable building of the install target
  -DHWY_ENABLE_INSTALL=OFF
)

register_cmake_command(
  TARGET
    highway
  LIBRARIES
    hwy
  ARGS
    ${HIGHWAY_CMAKE_ARGS}
  INCLUDES
    .
    hwy
)
@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    ebiggers/libdeflate
  COMMIT
    733848901289eca058804ca0737f8796875204c8
    78051988f96dc8d8916310d8b24021f01bd9e102
)

register_cmake_command(

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION 91bf2baced1b1309c7e05f19177c97fefec20976)
  set(WEBKIT_VERSION 53d4176ddc98ba721e50355826f58ec758766fa8)
endif()

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

@@ -20,7 +20,7 @@ else()
  unsupported(CMAKE_SYSTEM_NAME)
endif()

set(ZIG_COMMIT "cd1995944508e4c946deb75bd70947d302e0db37")
set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})

if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -50,6 +50,7 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF

optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to the local zig cache directory" DEFAULT ${CACHE_PATH}/zig/local)
optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global)
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler. Only available on macOS aarch64." DEFAULT ${BUILDKITE})

setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR})
setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR})
@@ -78,6 +79,7 @@ register_command(
    -DZIG_PATH=${ZIG_PATH}
    -DZIG_COMMIT=${ZIG_COMMIT}
    -DENABLE_ASAN=${ENABLE_ASAN}
    -DZIG_COMPILER_SAFE=${ZIG_COMPILER_SAFE}
    -P ${CWD}/cmake/scripts/DownloadZig.cmake
  SOURCES
    ${CWD}/cmake/scripts/DownloadZig.cmake

@@ -55,7 +55,7 @@ RUN apt-get update -qq \
    && which bun \
    && bun --version

FROM debian:bullseye-slim
FROM debian:bookworm-slim

# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful

@@ -56,7 +56,7 @@ RUN apt-get update -qq \
    && rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
    && chmod +x /usr/local/bin/bun

FROM debian:bullseye
FROM debian:bookworm

COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun

docs/api/cookie.md
@@ -0,0 +1,449 @@
Bun provides native APIs for working with HTTP cookies through `Bun.Cookie` and `Bun.CookieMap`. These APIs offer fast, easy-to-use methods for parsing, generating, and manipulating cookies in HTTP requests and responses.

## CookieMap class

`Bun.CookieMap` provides a Map-like interface for working with collections of cookies. It implements the `Iterable` interface, allowing you to use it with `for...of` loops and other iteration methods.

```ts
// Empty cookie map
const cookies = new Bun.CookieMap();

// From a cookie string
const cookies1 = new Bun.CookieMap("name=value; foo=bar");

// From an object
const cookies2 = new Bun.CookieMap({
  session: "abc123",
  theme: "dark",
});

// From an array of name/value pairs
const cookies3 = new Bun.CookieMap([
  ["session", "abc123"],
  ["theme", "dark"],
]);
```

### In HTTP servers

In Bun's HTTP server, the `cookies` property on the request object (in `routes`) is an instance of `CookieMap`:

```ts
const server = Bun.serve({
  routes: {
    "/": req => {
      // Access request cookies
      const cookies = req.cookies;

      // Get a specific cookie
      const sessionCookie = cookies.get("session");
      if (sessionCookie != null) {
        console.log(sessionCookie);
      }

      // Check if a cookie exists
      if (cookies.has("theme")) {
        // ...
      }

      // Set a cookie; it will be automatically applied to the response
      cookies.set("visited", "true");

      return new Response("Hello");
    },
  },
});

console.log("Server listening at: " + server.url);
```

### Methods

#### `get(name: string): string | null`

Retrieves a cookie by name. Returns `null` if the cookie doesn't exist.

```ts
// Get by name
const cookie = cookies.get("session");

if (cookie != null) {
  console.log(cookie);
}
```

#### `has(name: string): boolean`

Checks if a cookie with the given name exists.

```ts
// Check if cookie exists
if (cookies.has("session")) {
  // Cookie exists
}
```

#### `set(name: string, value: string): void`

#### `set(options: CookieInit): void`

#### `set(cookie: Cookie): void`

Adds or updates a cookie in the map. Cookies default to `{ path: "/", sameSite: "lax" }`.

```ts
// Set by name and value
cookies.set("session", "abc123");

// Set using options object
cookies.set({
  name: "theme",
  value: "dark",
  maxAge: 3600,
  secure: true,
});

// Set using Cookie instance
const cookie = new Bun.Cookie("visited", "true");
cookies.set(cookie);
```

#### `delete(name: string): void`

#### `delete(options: CookieStoreDeleteOptions): void`

Removes a cookie from the map. When applied to a Response, this adds a cookie with an empty string value and an expiry date in the past. A cookie will only be deleted successfully in the browser if its domain and path are the same as they were when the cookie was created.

```ts
// Delete by name using default domain and path.
cookies.delete("session");

// Delete with domain/path options.
cookies.delete({
  name: "session",
  domain: "example.com",
  path: "/admin",
});
```

#### `toJSON(): Record<string, string>`

Converts the cookie map to a serializable format.

```ts
const json = cookies.toJSON();
```

#### `toSetCookieHeaders(): string[]`

Returns an array of values for Set-Cookie headers that can be used to apply all cookie changes.

When using `Bun.serve()`, you don't need to call this method explicitly. Any changes made to the `req.cookies` map are automatically applied to the response headers. This method is primarily useful when working with other HTTP server implementations.

```js
import { createServer } from "node:http";
import { CookieMap } from "bun";

const server = createServer((req, res) => {
  const cookieHeader = req.headers.cookie || "";
  const cookies = new CookieMap(cookieHeader);

  cookies.set("view-count", Number(cookies.get("view-count") || "0") + 1);
  cookies.delete("session");

  res.writeHead(200, {
    "Content-Type": "text/plain",
    "Set-Cookie": cookies.toSetCookieHeaders(),
  });
  res.end(`Found ${cookies.size} cookies`);
});

server.listen(3000, () => {
  console.log("Server running at http://localhost:3000/");
});
```

### Iteration

`CookieMap` provides several methods for iteration:

```ts
// Iterate over [name, value] entries
for (const [name, value] of cookies) {
  console.log(`${name}: ${value}`);
}

// Using entries()
for (const [name, value] of cookies.entries()) {
  console.log(`${name}: ${value}`);
}

// Using keys()
for (const name of cookies.keys()) {
  console.log(name);
}

// Using values()
for (const value of cookies.values()) {
  console.log(value);
}

// Using forEach
cookies.forEach((value, name) => {
  console.log(`${name}: ${value}`);
});
```

### Properties

#### `size: number`

Returns the number of cookies in the map.

```ts
console.log(cookies.size); // Number of cookies
```

## Cookie class

`Bun.Cookie` represents an HTTP cookie with its name, value, and attributes.

```ts
import { Cookie } from "bun";

// Create a basic cookie
const cookie = new Bun.Cookie("name", "value");

// Create a cookie with options
const secureSessionCookie = new Bun.Cookie("session", "abc123", {
  domain: "example.com",
  path: "/admin",
  expires: new Date(Date.now() + 86400000), // 1 day
  httpOnly: true,
  secure: true,
  sameSite: "strict",
});

// Parse from a cookie string
const parsedCookie = new Bun.Cookie("name=value; Path=/; HttpOnly");

// Create from an options object
const objCookie = new Bun.Cookie({
  name: "theme",
  value: "dark",
  maxAge: 3600,
  secure: true,
});
```

### Constructors

```ts
// Basic constructor with name/value
new Bun.Cookie(name: string, value: string);

// Constructor with name, value, and options
new Bun.Cookie(name: string, value: string, options: CookieInit);

// Constructor from cookie string
new Bun.Cookie(cookieString: string);

// Constructor from cookie object
new Bun.Cookie(options: CookieInit);
```

### Properties

```ts
cookie.name; // string - Cookie name
cookie.value; // string - Cookie value
cookie.domain; // string | null - Domain scope (null if not specified)
cookie.path; // string - URL path scope (defaults to "/")
cookie.expires; // number | undefined - Expiration timestamp (ms since epoch)
cookie.secure; // boolean - Require HTTPS
cookie.sameSite; // "strict" | "lax" | "none" - SameSite setting
cookie.partitioned; // boolean - Whether the cookie is partitioned (CHIPS)
cookie.maxAge; // number | undefined - Max age in seconds
cookie.httpOnly; // boolean - Accessible only via HTTP (not JavaScript)
```

### Methods

#### `isExpired(): boolean`

Checks if the cookie has expired.

```ts
// Expired cookie (Date in the past)
const expiredCookie = new Bun.Cookie("name", "value", {
  expires: new Date(Date.now() - 1000),
});
console.log(expiredCookie.isExpired()); // true

// Valid cookie (Using maxAge instead of expires)
const validCookie = new Bun.Cookie("name", "value", {
  maxAge: 3600, // 1 hour in seconds
});
console.log(validCookie.isExpired()); // false

// Session cookie (no expiration)
const sessionCookie = new Bun.Cookie("name", "value");
console.log(sessionCookie.isExpired()); // false
```

#### `serialize(): string`

#### `toString(): string`

Returns a string representation of the cookie suitable for a `Set-Cookie` header.

```ts
const cookie = new Bun.Cookie("session", "abc123", {
  domain: "example.com",
  path: "/admin",
  expires: new Date(Date.now() + 86400000),
  secure: true,
  httpOnly: true,
  sameSite: "strict",
});

console.log(cookie.serialize());
// => "session=abc123; Domain=example.com; Path=/admin; Expires=Sun, 19 Mar 2025 15:03:26 GMT; Secure; HttpOnly; SameSite=strict"
console.log(cookie.toString());
// => "session=abc123; Domain=example.com; Path=/admin; Expires=Sun, 19 Mar 2025 15:03:26 GMT; Secure; HttpOnly; SameSite=strict"
```

#### `toJSON(): CookieInit`

Converts the cookie to a plain object suitable for JSON serialization.

```ts
const cookie = new Bun.Cookie("session", "abc123", {
  secure: true,
  httpOnly: true,
});

const json = cookie.toJSON();
// => {
//   name: "session",
//   value: "abc123",
//   path: "/",
//   secure: true,
//   httpOnly: true,
//   sameSite: "lax",
//   partitioned: false
// }

// Works with JSON.stringify
const jsonString = JSON.stringify(cookie);
```

### Static methods

#### `Cookie.parse(cookieString: string): Cookie`

Parses a cookie string into a `Cookie` instance.

```ts
const cookie = Bun.Cookie.parse("name=value; Path=/; Secure; SameSite=Lax");

console.log(cookie.name); // "name"
console.log(cookie.value); // "value"
console.log(cookie.path); // "/"
console.log(cookie.secure); // true
console.log(cookie.sameSite); // "lax"
```

#### `Cookie.from(name: string, value: string, options?: CookieInit): Cookie`

Factory method to create a cookie.

```ts
const cookie = Bun.Cookie.from("session", "abc123", {
  httpOnly: true,
  secure: true,
  maxAge: 3600,
});
```

## Types

```ts
interface CookieInit {
  name?: string;
  value?: string;
  domain?: string;
  /** Defaults to '/'. To allow the browser to set the path, use an empty string. */
  path?: string;
  expires?: number | Date | string;
  secure?: boolean;
  /** Defaults to `lax`. */
  sameSite?: CookieSameSite;
  httpOnly?: boolean;
  partitioned?: boolean;
  maxAge?: number;
}

interface CookieStoreDeleteOptions {
  name: string;
  domain?: string | null;
  path?: string;
}

interface CookieStoreGetOptions {
  name?: string;
  url?: string;
}

type CookieSameSite = "strict" | "lax" | "none";

class Cookie {
  constructor(name: string, value: string, options?: CookieInit);
  constructor(cookieString: string);
  constructor(cookieObject?: CookieInit);

  readonly name: string;
  value: string;
  domain?: string;
  path: string;
  expires?: Date;
  secure: boolean;
  sameSite: CookieSameSite;
  partitioned: boolean;
  maxAge?: number;
  httpOnly: boolean;

  isExpired(): boolean;

  serialize(): string;
  toString(): string;
  toJSON(): CookieInit;

  static parse(cookieString: string): Cookie;
  static from(name: string, value: string, options?: CookieInit): Cookie;
}

class CookieMap implements Iterable<[string, string]> {
  constructor(init?: string[][] | Record<string, string> | string);

  get(name: string): string | null;

  toSetCookieHeaders(): string[];

  has(name: string): boolean;
  set(name: string, value: string, options?: CookieInit): void;
  set(options: CookieInit): void;
  delete(name: string): void;
  delete(options: CookieStoreDeleteOptions): void;
  delete(name: string, options: Omit<CookieStoreDeleteOptions, "name">): void;
  toJSON(): Record<string, string>;

  readonly size: number;

  entries(): IterableIterator<[string, string]>;
  keys(): IterableIterator<string>;
  values(): IterableIterator<string>;
  forEach(callback: (value: string, key: string, map: CookieMap) => void): void;
  [Symbol.iterator](): IterableIterator<[string, string]>;
}
```
@@ -61,6 +61,7 @@ Routes in `Bun.serve()` receive a `BunRequest` (which extends [`Request`](https:
// Simplified for brevity
interface BunRequest<T extends string> extends Request {
  params: Record<T, string>;
  readonly cookies: CookieMap;
}
```

@@ -934,6 +935,83 @@ const server = Bun.serve({

Returns `null` for closed requests or Unix domain sockets.

## Working with Cookies

Bun provides a built-in API for working with cookies in HTTP requests and responses. The `BunRequest` object includes a `cookies` property that provides a `CookieMap` for easily accessing and manipulating cookies. When using `routes`, `Bun.serve()` automatically tracks `request.cookies.set` and applies them to the response.

### Reading cookies

Read cookies from incoming requests using the `cookies` property on the `BunRequest` object:

```ts
Bun.serve({
  routes: {
    "/profile": req => {
      // Access cookies from the request
      const userId = req.cookies.get("user_id");
      const theme = req.cookies.get("theme") || "light";

      return Response.json({
        userId,
        theme,
        message: "Profile page",
      });
    },
  },
});
```

### Setting cookies

To set cookies, use the `set` method on the `CookieMap` from the `BunRequest` object.

```ts
Bun.serve({
  routes: {
    "/login": req => {
      const cookies = req.cookies;

      // Set a cookie with various options
      cookies.set("user_id", "12345", {
        maxAge: 60 * 60 * 24 * 7, // 1 week
        httpOnly: true,
        secure: true,
        path: "/",
      });

      // Add a theme preference cookie
      cookies.set("theme", "dark");

      // Modified cookies from the request are automatically applied to the response
      return new Response("Login successful");
    },
  },
});
```

`Bun.serve()` automatically tracks modified cookies from the request and applies them to the response.

### Deleting cookies

To delete a cookie, use the `delete` method on the `request.cookies` (`CookieMap`) object:

```ts
Bun.serve({
  routes: {
    "/logout": req => {
      // Delete the user_id cookie
      req.cookies.delete("user_id", {
        path: "/",
      });

      return new Response("Logged out successfully");
    },
  },
});
```

Deleted cookies become a `Set-Cookie` header on the response with the `maxAge` set to `0` and an empty `value`.
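
A rough sketch of what that looks like on the wire (the exact attribute order and date formatting are illustrative, not guaranteed):

```ts
const cookies = new Bun.CookieMap("user_id=12345");
cookies.delete("user_id", { path: "/" });

// One Set-Cookie value per changed cookie; the deleted cookie carries an
// empty value, Max-Age=0, and an expiry date in the past.
console.log(cookies.toSetCookieHeaders());
// e.g. ["user_id=; Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0"]
```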

## Server Metrics

### server.pendingRequests and server.pendingWebSockets

docs/api/redis.md
@@ -0,0 +1,492 @@
Bun provides native bindings for working with Redis databases with a modern, Promise-based API. The interface is designed to be simple and performant, with built-in connection management, fully typed responses, and TLS support. **New in Bun v1.2.9**

```ts
import { redis } from "bun";

// Set a key
await redis.set("greeting", "Hello from Bun!");

// Get a key
const greeting = await redis.get("greeting");
console.log(greeting); // "Hello from Bun!"

// Increment a counter
await redis.set("counter", 0);
await redis.incr("counter");

// Check if a key exists
const exists = await redis.exists("greeting");

// Delete a key
await redis.del("greeting");
```

## Getting Started

To use the Redis client, you first need to create a connection:

```ts
import { redis, RedisClient } from "bun";

// Using the default client (reads connection info from environment)
// process.env.REDIS_URL is used by default
await redis.set("hello", "world");
const result = await redis.get("hello");

// Creating a custom client
const client = new RedisClient("redis://username:password@localhost:6379");
await client.set("counter", "0");
await client.incr("counter");
```

By default, the client reads connection information from the following environment variables, in order of precedence (a sketch of the equivalent explicit lookup follows the list):

- `REDIS_URL`
- If not set, defaults to `"redis://localhost:6379"`
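
A minimal sketch of that default resolution, written out by hand (the client does this internally):

```ts
import { RedisClient } from "bun";

// Resolve the connection URL the same way the default client does:
// prefer REDIS_URL, otherwise fall back to localhost.
const url = process.env.REDIS_URL || "redis://localhost:6379";
const client = new RedisClient(url);
```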

### Connection Lifecycle

The Redis client automatically handles connections in the background:

```ts
// No connection is made until a command is executed
const client = new RedisClient();

// First command initiates the connection
await client.set("key", "value");

// Connection remains open for subsequent commands
await client.get("key");

// Explicitly close the connection when done
client.close();
```

You can also manually control the connection lifecycle:

```ts
const client = new RedisClient();

// Explicitly connect
await client.connect();

// Run commands
await client.set("key", "value");

// Disconnect when done
client.close();
```

## Basic Operations

### String Operations

```ts
// Set a key
await redis.set("user:1:name", "Alice");

// Get a key
const name = await redis.get("user:1:name");

// Delete a key
await redis.del("user:1:name");

// Check if a key exists
const exists = await redis.exists("user:1:name");

// Set expiration (in seconds)
await redis.set("session:123", "active");
await redis.expire("session:123", 3600); // expires in 1 hour

// Get time to live (in seconds)
const ttl = await redis.ttl("session:123");
```

### Numeric Operations

```ts
// Set initial value
await redis.set("counter", "0");

// Increment by 1
await redis.incr("counter");

// Decrement by 1
await redis.decr("counter");
```

### Hash Operations

```ts
// Set multiple fields in a hash
await redis.hmset("user:123", [
  "name",
  "Alice",
  "email",
  "alice@example.com",
  "active",
  "true",
]);

// Get multiple fields from a hash
const userFields = await redis.hmget("user:123", ["name", "email"]);
console.log(userFields); // ["Alice", "alice@example.com"]

// Increment a numeric field in a hash
await redis.hincrby("user:123", "visits", 1);

// Increment a float field in a hash
await redis.hincrbyfloat("user:123", "score", 1.5);
```

### Set Operations

```ts
// Add member to set
await redis.sadd("tags", "javascript");

// Remove member from set
await redis.srem("tags", "javascript");

// Check if member exists in set
const isMember = await redis.sismember("tags", "javascript");

// Get all members of a set
const allTags = await redis.smembers("tags");

// Get a random member
const randomTag = await redis.srandmember("tags");

// Pop (remove and return) a random member
const poppedTag = await redis.spop("tags");
```

## Advanced Usage

### Command Execution and Pipelining

The client automatically pipelines commands, improving performance by sending multiple commands in a batch and processing responses as they arrive.

```ts
// Commands are automatically pipelined by default
const [infoResult, listResult] = await Promise.all([
  redis.get("user:1:name"),
  redis.get("user:2:email"),
]);
```

To disable automatic pipelining, you can set the `enableAutoPipelining` option to `false`:

```ts
const client = new RedisClient("redis://localhost:6379", {
  enableAutoPipelining: false,
});
```

### Raw Commands

When you need to use commands that don't have convenience methods, you can use the `send` method:

```ts
// Run any Redis command
const info = await redis.send("INFO", []);

// LPUSH to a list
await redis.send("LPUSH", ["mylist", "value1", "value2"]);

// Get list range
const list = await redis.send("LRANGE", ["mylist", "0", "-1"]);
```

The `send` method allows you to use any Redis command, even ones that don't have dedicated methods in the client. The first argument is the command name, and the second argument is an array of string arguments.

### Connection Events

You can register handlers for connection events:

```ts
const client = new RedisClient();

// Called when successfully connected to Redis server
client.onconnect = () => {
  console.log("Connected to Redis server");
};

// Called when disconnected from Redis server
client.onclose = error => {
  console.error("Disconnected from Redis server:", error);
};

// Manually connect/disconnect
await client.connect();
client.close();
```

### Connection Status and Monitoring

```ts
// Check if connected
console.log(client.connected); // boolean indicating connection status

// Check amount of data buffered (in bytes)
console.log(client.bufferedAmount);
```

### Type Conversion

The Redis client handles automatic type conversion for Redis responses:

- Integer responses are returned as JavaScript numbers
- Bulk strings are returned as JavaScript strings
- Simple strings are returned as JavaScript strings
- Null bulk strings are returned as `null`
- Array responses are returned as JavaScript arrays
- Error responses throw JavaScript errors with appropriate error codes
- Boolean responses (RESP3) are returned as JavaScript booleans
- Map responses (RESP3) are returned as JavaScript objects
- Set responses (RESP3) are returned as JavaScript arrays

Special handling for specific commands (see the sketch after this list):

- `EXISTS` returns a boolean instead of a number (1 becomes true, 0 becomes false)
- `SISMEMBER` returns a boolean (1 becomes true, 0 becomes false)
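
A small illustration that restates those conversion rules:

```ts
import { redis } from "bun";

await redis.sadd("tags", "bun");

// Booleans, not raw RESP integers:
const isMember = await redis.sismember("tags", "bun"); // true (not 1)
const hasKey = await redis.exists("tags"); // true (not 1)

// Integer replies come back as numbers, bulk strings as strings:
const count = await redis.incr("counter"); // number
const value = await redis.get("counter"); // string (or null if missing)
```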

The following commands disable automatic pipelining:

- `AUTH`
- `INFO`
- `QUIT`
- `EXEC`
- `MULTI`
- `WATCH`
- `SCRIPT`
- `SELECT`
- `CLUSTER`
- `DISCARD`
- `UNWATCH`
- `PIPELINE`
- `SUBSCRIBE`
- `UNSUBSCRIBE`
- `PUNSUBSCRIBE`

## Connection Options

When creating a client, you can pass various options to configure the connection:

```ts
const client = new RedisClient("redis://localhost:6379", {
  // Connection timeout in milliseconds (default: 10000)
  connectionTimeout: 5000,

  // Idle timeout in milliseconds (default: 0 = no timeout)
  idleTimeout: 30000,

  // Whether to automatically reconnect on disconnection (default: true)
  autoReconnect: true,

  // Maximum number of reconnection attempts (default: 10)
  maxRetries: 10,

  // Whether to queue commands when disconnected (default: true)
  enableOfflineQueue: true,

  // Whether to automatically pipeline commands (default: true)
  enableAutoPipelining: true,

  // TLS options (default: false)
  tls: true,
  // Alternatively, provide custom TLS config:
  // tls: {
  //   rejectUnauthorized: true,
  //   ca: "path/to/ca.pem",
  //   cert: "path/to/cert.pem",
  //   key: "path/to/key.pem",
  // }
});
```

### Reconnection Behavior

When a connection is lost, the client automatically attempts to reconnect with exponential backoff (a sketch of the schedule follows this list):

1. The client starts with a small delay (50ms) and doubles it with each attempt
2. Reconnection delay is capped at 2000ms (2 seconds)
3. The client attempts to reconnect up to `maxRetries` times (default: 10)
4. Commands executed during disconnection are:
   - Queued if `enableOfflineQueue` is true (default)
   - Rejected immediately if `enableOfflineQueue` is false
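
A minimal sketch of that backoff schedule under the stated defaults (illustrative only; the real client implements this internally):

```ts
// Delay before reconnect attempt n: start at 50ms, double each time,
// capped at 2000ms.
function reconnectDelay(attempt: number): number {
  return Math.min(50 * 2 ** attempt, 2000);
}

console.log([0, 1, 2, 3, 4, 5, 6].map(reconnectDelay));
// => [ 50, 100, 200, 400, 800, 1600, 2000 ]
```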

## Supported URL Formats

The Redis client supports various URL formats:

```ts
// Standard Redis URL
new RedisClient("redis://localhost:6379");

// With authentication
new RedisClient("redis://username:password@localhost:6379");

// With database number
new RedisClient("redis://localhost:6379/0");

// TLS connections
new RedisClient("rediss://localhost:6379");
new RedisClient("redis+tls://localhost:6379");

// Unix socket connections
new RedisClient("redis+unix:///path/to/socket");

// TLS over Unix socket
new RedisClient("redis+tls+unix:///path/to/socket");
```


## Error Handling

The Redis client throws typed errors for different scenarios:

```ts
try {
  await redis.get("non-existent-key");
} catch (error) {
  if (error.code === "ERR_REDIS_CONNECTION_CLOSED") {
    console.error("Connection to Redis server was closed");
  } else if (error.code === "ERR_REDIS_AUTHENTICATION_FAILED") {
    console.error("Authentication failed");
  } else {
    console.error("Unexpected error:", error);
  }
}
```

Common error codes:

- `ERR_REDIS_CONNECTION_CLOSED` - Connection to the server was closed
- `ERR_REDIS_AUTHENTICATION_FAILED` - Failed to authenticate with the server
- `ERR_REDIS_INVALID_RESPONSE` - Received an invalid response from the server

## Example Use Cases

### Caching

```ts
async function getUserWithCache(userId) {
  const cacheKey = `user:${userId}`;

  // Try to get from cache first
  const cachedUser = await redis.get(cacheKey);
  if (cachedUser) {
    return JSON.parse(cachedUser);
  }

  // Not in cache, fetch from database
  const user = await database.getUser(userId);

  // Store in cache for 1 hour
  await redis.set(cacheKey, JSON.stringify(user));
  await redis.expire(cacheKey, 3600);

  return user;
}
```

### Rate Limiting

```ts
async function rateLimit(ip, limit = 100, windowSecs = 3600) {
  const key = `ratelimit:${ip}`;

  // Increment counter
  const count = await redis.incr(key);

  // Set expiry if this is the first request in the window
  if (count === 1) {
    await redis.expire(key, windowSecs);
  }

  // Check if limit exceeded
  return {
    limited: count > limit,
    remaining: Math.max(0, limit - count),
  };
}
```

### Session Storage

```ts
async function createSession(userId, data) {
  const sessionId = crypto.randomUUID();
  const key = `session:${sessionId}`;

  // Store session with expiration
  await redis.hmset(key, [
    "userId",
    userId.toString(),
    "created",
    Date.now().toString(),
    "data",
    JSON.stringify(data),
  ]);
  await redis.expire(key, 86400); // 24 hours

  return sessionId;
}

async function getSession(sessionId) {
  const key = `session:${sessionId}`;

  // Get session data
  const exists = await redis.exists(key);
  if (!exists) return null;

  const [userId, created, data] = await redis.hmget(key, [
    "userId",
    "created",
    "data",
  ]);

  return {
    userId: Number(userId),
    created: Number(created),
    data: JSON.parse(data),
  };
}
```

## Implementation Notes

Bun's Redis client is implemented in Zig and uses the Redis Serialization Protocol (RESP3). It manages connections efficiently and provides automatic reconnection with exponential backoff.

The client supports pipelining commands, meaning multiple commands can be sent without waiting for the replies to previous commands. This significantly improves performance when sending multiple commands in succession, as sketched below.
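
For example, issuing several commands together lets them share round trips (a minimal sketch, assuming a connected `redis` client with `enableAutoPipelining` left at its default):

```ts
// These commands are written to the socket back-to-back;
// the client matches up the replies as they stream in.
const [name, count, tags] = await Promise.all([
  redis.get("user:1:name"),
  redis.get("user:1:count"),
  redis.get("user:1:tags"),
]);
```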

### RESP3 Protocol Support

Bun's Redis client uses the newer RESP3 protocol by default, which provides more data types and features compared to RESP2:

- Better error handling with typed errors
- Native Boolean responses
- Map/Dictionary responses (key-value objects)
- Set responses
- Double (floating point) values
- BigNumber support for large integer values

When connecting to Redis servers using older versions that don't support RESP3, the client automatically falls back to a compatible mode.

## Limitations and Future Plans

Current limitations of the Redis client that we plan to address in future versions:

- [ ] No dedicated API for pub/sub functionality (though you can use the raw command API)
- [ ] Transactions (MULTI/EXEC) must be done through raw commands for now
- [ ] Streams are supported but without dedicated methods

Unsupported features:

- Redis Sentinel
- Redis Cluster

103	docs/api/s3.md
@@ -619,6 +619,48 @@ When the S3 Object Storage service returns an error (that is, not Bun), it will

The `S3Client` class provides several static methods for interacting with S3.

### `S3Client.write` (static)

To write data directly to a path in the bucket, you can use the `S3Client.write` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};

// Write string
await S3Client.write("my-file.txt", "Hello World");

// Write JSON with type
await S3Client.write(
  "data.json",
  JSON.stringify({ hello: "world" }),
  {
    ...credentials,
    type: "application/json",
  }
);

// Write from fetch
const res = await fetch("https://example.com/data");
await S3Client.write("data.bin", res, credentials);

// Write with ACL
await S3Client.write("public.html", html, {
  ...credentials,
  acl: "public-read",
  type: "text/html"
});
```

This is equivalent to calling `new S3Client(credentials).write("my-file.txt", "Hello World")`.

### `S3Client.presign` (static)

To generate a presigned URL for an S3 file, you can use the `S3Client.presign` static method.

@@ -642,6 +684,45 @@ const url = S3Client.presign("my-file.txt", {

This is equivalent to calling `new S3Client(credentials).presign("my-file.txt", { expiresIn: 3600 })`.
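
The full example is elided by the hunk above; as a minimal sketch of the call shape (using the `expiresIn: 3600` option from the equivalence note):

```ts
import { S3Client } from "bun";

// Generate a URL that grants temporary read access for one hour
const url = S3Client.presign("my-file.txt", {
  ...credentials,
  expiresIn: 3600,
});
```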

### `S3Client.list` (static)

To list some or all (up to 1,000) objects in a bucket, you can use the `S3Client.list` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};

// List (up to) 1000 objects in the bucket
const allObjects = await S3Client.list(null, credentials);

// List (up to) 500 objects under `uploads/` prefix, with owner field for each object
const uploads = await S3Client.list({
  prefix: 'uploads/',
  maxKeys: 500,
  fetchOwner: true,
}, credentials);

// Check if more results are available
if (uploads.isTruncated) {
  // List next batch of objects under `uploads/` prefix
  const moreUploads = await S3Client.list({
    prefix: 'uploads/',
    maxKeys: 500,
    startAfter: uploads.contents!.at(-1).key,
    fetchOwner: true,
  }, credentials);
}
```

This is equivalent to calling `new S3Client(credentials).list()`.

### `S3Client.exists` (static)

To check if an S3 file exists, you can use the `S3Client.exists` static method.

@@ -654,6 +735,7 @@ const credentials = {
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};

const exists = await S3Client.exists("my-file.txt", credentials);

@@ -670,6 +752,26 @@ const s3file = s3.file("my-file.txt", {

const exists = await s3file.exists();
```

### `S3Client.size` (static)

To quickly check the size of an S3 file without downloading it, you can use the `S3Client.size` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};

const bytes = await S3Client.size("my-file.txt", credentials);
```

This is equivalent to calling `new S3Client(credentials).size("my-file.txt")`.

### `S3Client.stat` (static)

To get the size, etag, and other metadata of an S3 file, you can use the `S3Client.stat` static method.

@@ -682,6 +784,7 @@ const credentials = {
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};

const stat = await S3Client.stat("my-file.txt", credentials);

@@ -253,6 +253,19 @@ const proc = Bun.spawn({

The `killSignal` option also controls which signal is sent when an AbortSignal is aborted.

## Using maxBuffer

For `spawnSync`, you can limit the maximum number of bytes of output before the process is killed:

```ts
// Kill 'yes' after it emits over 100 bytes of output
const result = Bun.spawnSync({
  cmd: ["yes"], // or ["bun", "exec", "yes"] on Windows
  maxBuffer: 100,
});
// process exits
```

## Inter-process communication (IPC)

Bun supports a direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler; a sketch follows the type listing below.

@@ -423,6 +436,7 @@ namespace SpawnOptions {
  signal?: AbortSignal;
  timeout?: number;
  killSignal?: string | number;
  maxBuffer?: number;
}

type Readable =
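
A minimal sketch of an `ipc` handler (the `child.ts` file is hypothetical; on its side it would call `process.send("pong")`):

```ts
// parent.ts
const child = Bun.spawn(["bun", "child.ts"], {
  ipc(message) {
    // Invoked for each message the child sends
    console.log("from child:", message);
  },
});

// Send a message down the channel to the child
child.send("ping");
```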

@@ -240,7 +240,7 @@ const result = await sql.unsafe(

### Executing and Cancelling Queries

Bun's SQL is lazy, which means it will only start executing when awaited or executed with `.execute()`.
You can cancel a query that is currently executing by calling the `cancel()` method on the query object.
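
A minimal sketch of cancellation (assuming a configured `sql` instance; the slow Postgres query is illustrative):

```ts
const query = sql`SELECT pg_sleep(10)`;

// Cancel the in-flight query shortly after it starts
setTimeout(() => query.cancel(), 100);

try {
  await query;
} catch (error) {
  console.error("query cancelled:", error);
}
```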

@@ -117,14 +117,14 @@ type WebSocketData = {

// TypeScript: specify the type of `data`
Bun.serve<WebSocketData>({
  fetch(req, server) {
    const cookies = new Bun.CookieMap(req.headers.get("cookie")!);

    server.upgrade(req, {
      // this object must conform to WebSocketData
      data: {
        createdAt: Date.now(),
        channelId: new URL(req.url).searchParams.get("channelId"),
        authToken: cookies.get("X-Token"),
      },
    });

@@ -15,8 +15,8 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t

```jsonc
{
  "compilerOptions": {
    // Environment setup & latest features
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "ESNext",
    "moduleDetection": "force",

@@ -33,11 +33,12 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t

    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false,
  },
}
```

37	docs/nav.ts

@@ -265,12 +265,25 @@ export default {

page("test/time", "Dates and times", {
  description: "Control the date & time in your tests for more reliable and deterministic tests",
}),
page("test/coverage", "Code coverage", {
  description: "Generate code coverage reports with `bun test --coverage`",
}),
page("test/reporters", "Test reporters", {
  description: "Add a junit reporter to your test runs",
}),
page("test/configuration", "Test configuration", {
  description: "Configure the test runner with bunfig.toml",
}),
page("test/runtime-behavior", "Runtime behavior", {
  description: "Learn how the test runner affects Bun's runtime behavior",
}),
page("test/discovery", "Finding tests", {
  description: "Learn how the test runner discovers tests",
}),
page("test/dom", "DOM testing", {
  description: "Write headless tests for UI and React/Vue/Svelte/Lit components with happy-dom",
}),

divider("Package runner"),
page("cli/bunx", "`bunx`", {

@@ -331,6 +344,9 @@ export default {

page("api/file-io", "File I/O", {
  description: `Read and write files fast with Bun's heavily optimized file system API.`,
}), // "`Bun.write`"),
page("api/redis", "Redis client", {
  description: `Bun provides a fast, native Redis client with automatic command pipelining for better performance.`,
}),
page("api/import-meta", "import.meta", {
  description: `Module-scoped metadata and utilities`,
}), // "`bun:sqlite`"),

@@ -355,24 +371,24 @@ export default {

page("api/spawn", "Child processes", {
  description: `Spawn sync and async child processes with easily configurable input and output streams.`,
}), // "`Bun.spawn`"),
page("api/hashing", "Hashing", {
  description: `Native support for a range of fast hashing algorithms.`,
}), // "`Bun.serve`"),
page("api/console", "Console", {
  description: `Bun implements a Node.js-compatible \`console\` object with colorized output and deep pretty-printing.`,
}), // "`Node-API`"),
page("api/cookie", "Cookie", {
  description: "Bun's native Cookie API simplifies working with HTTP cookies.",
}), // "`Node-API`"),
page("api/ffi", "FFI", {
  description: `Call native code from JavaScript with Bun's foreign function interface (FFI) API.`,
}), // "`bun:ffi`"),
page("api/cc", "C Compiler", {
  description: `Build & run native C from JavaScript with Bun's native C compiler API`,
}), // "`bun:ffi`"),
page("api/html-rewriter", "HTMLRewriter", {
  description: `Parse and transform HTML with Bun's native HTMLRewriter API, inspired by Cloudflare Workers.`,
}), // "`HTMLRewriter`"),
page("api/test", "Testing", {
  description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
}), // "`bun:test`"),

@@ -398,6 +414,9 @@ export default {

page("api/color", "Color", {
  description: `Bun's color function leverages Bun's CSS parser for parsing, normalizing, and converting colors from user input to a variety of output formats.`,
}), // "`Color`"),
page("api/transpiler", "Transpiler", {
  description: `Bun exposes its internal transpiler as a pluggable API.`,
}), // "`Bun.Transpiler`"),

// divider("Dev Server"),
// page("bun-dev", "Vanilla"),

@@ -32,7 +32,7 @@ pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {

const gen = bun.gen.math; // "math" being this file's basename

const std = @import("std");
const bun = @import("bun");
const JSC = bun.JSC;
```

@@ -32,7 +32,7 @@ This page is updated regularly to reflect compatibility status of the latest ver

### [`node:events`](https://nodejs.org/api/events.html)

🟢 Fully implemented. 100% of Node.js's test suite passes. `EventEmitterAsyncResource` uses `AsyncResource` underneath.

### [`node:fs`](https://nodejs.org/api/fs.html)

@@ -104,9 +104,7 @@ This page is updated regularly to reflect compatibility status of the latest ver

### [`node:crypto`](https://nodejs.org/api/crypto.html)

🟡 Missing `secureHeapUsed` `setEngine` `setFips`

### [`node:domain`](https://nodejs.org/api/domain.html)

@@ -118,7 +116,7 @@ Some methods are not optimized yet.

### [`node:module`](https://nodejs.org/api/module.html)

🟡 Missing `syncBuiltinESMExports`, `Module#load()`. Overriding `require.cache` is supported for ESM & CJS modules. `module._extensions`, `module._pathCache`, `module._cache` are no-ops. `module.register` is not implemented and we recommend using a [`Bun.plugin`](https://bun.sh/docs/runtime/plugins) in the meantime.

### [`node:net`](https://nodejs.org/api/net.html)

@@ -142,7 +140,7 @@ Some methods are not optimized yet.

### [`node:util`](https://nodejs.org/api/util.html)

🟡 Missing `getCallSite` `getCallSites` `getSystemErrorMap` `getSystemErrorMessage` `transferableAbortSignal` `transferableAbortController`

### [`node:v8`](https://nodejs.org/api/v8.html)

@@ -378,7 +376,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa

### [`require()`](https://nodejs.org/api/globals.html#require)

🟢 Fully implemented, including [`require.main`](https://nodejs.org/api/modules.html#requiremain), [`require.cache`](https://nodejs.org/api/modules.html#requirecache), [`require.resolve`](https://nodejs.org/api/modules.html#requireresolverequest-options).

### [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)

87	docs/test/configuration.md	Normal file

@@ -0,0 +1,87 @@

Configure `bun test` via the `bunfig.toml` file and command-line options. This page documents the available configuration options for `bun test`.

## bunfig.toml options

You can configure `bun test` behavior by adding a `[test]` section to your `bunfig.toml` file:

```toml
[test]
# Options go here
```

### Test discovery

#### root

The `root` option specifies a root directory for test discovery, overriding the default behavior of scanning from the project root.

```toml
[test]
root = "src" # Only scan for tests in the src directory
```

### Reporters

#### reporter.junit

Configure the JUnit reporter output file path directly in the config file:

```toml
[test.reporter]
junit = "path/to/junit.xml" # Output path for JUnit XML report
```

This complements the `--reporter=junit` and `--reporter-outfile` CLI flags.

### Memory usage

#### smol

Enable the `--smol` memory-saving mode specifically for the test runner:

```toml
[test]
smol = true # Reduce memory usage during test runs
```

This is equivalent to using the `--smol` flag on the command line.

### Coverage options

In addition to the options documented in the [coverage documentation](./coverage.md), the following options are available:

#### coverageSkipTestFiles

Exclude files matching test patterns (e.g., `*.test.ts`) from the coverage report:

```toml
[test]
coverageSkipTestFiles = true # Exclude test files from coverage reports
```

#### coverageThreshold (Object form)

The coverage threshold can be specified either as a number (as shown in the coverage documentation) or as an object with specific thresholds:

```toml
[test]
# Set specific thresholds for different coverage metrics
coverageThreshold = { lines = 0.9, functions = 0.8, statements = 0.85 }
```

Setting any of these enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.

#### coverageIgnoreSourcemaps

Internally, Bun transpiles every file, so code coverage must go through sourcemaps before it can be reported. This flag lets you opt out of that behavior, but the results can be confusing: during transpilation, Bun may move code around and change variable names. This option is mostly useful for debugging coverage issues.

```toml
[test]
coverageIgnoreSourcemaps = true # Don't use sourcemaps for coverage analysis
```

When using this option, you probably want to stick a `// @bun` comment at the top of the source file to opt out of the transpilation process.

### Install settings inheritance

The `bun test` command inherits relevant network and installation configuration (registry, cafile, prefer, exact, etc.) from the `[install]` section of bunfig.toml. This is important if tests need to interact with private registries or require specific install behaviors triggered during the test run.

@@ -52,9 +52,22 @@ It is possible to specify a coverage threshold in `bunfig.toml`. If your test su

coverageThreshold = 0.9

# to set different thresholds for lines, functions, and statements
coverageThreshold = { lines = 0.9, functions = 0.9, statements = 0.9 }
```

Setting any of these thresholds enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.

### Exclude test files from coverage

By default, test files themselves are included in coverage reports. You can exclude them with:

```toml
[test]
coverageSkipTestFiles = true # default false
```

This will exclude files matching test patterns (e.g., `*.test.ts`, `*_spec.js`) from the coverage report.

### Sourcemaps

Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.

@@ -64,6 +77,14 @@ Internally, Bun transpiles all files by default, so Bun automatically generates

coverageIgnoreSourcemaps = true # default false
```

### Coverage defaults

By default, coverage reports:

1. Exclude `node_modules` directories
2. Exclude files loaded via non-JS/TS loaders (e.g., .css, .txt) unless a custom JS loader is specified
3. Include test files themselves (can be disabled with `coverageSkipTestFiles = true` as shown above)

### Coverage reporters

By default, coverage reports will be printed to the console.

85	docs/test/discovery.md	Normal file

@@ -0,0 +1,85 @@

bun test's file discovery mechanism determines which files to run as tests. Understanding how it works helps you structure your test files effectively.

## Default Discovery Logic

By default, `bun test` recursively searches the project directory for files that match specific patterns:

- `*.test.{js|jsx|ts|tsx}` - Files ending with `.test.js`, `.test.jsx`, `.test.ts`, or `.test.tsx`
- `*_test.{js|jsx|ts|tsx}` - Files ending with `_test.js`, `_test.jsx`, `_test.ts`, or `_test.tsx`
- `*.spec.{js|jsx|ts|tsx}` - Files ending with `.spec.js`, `.spec.jsx`, `.spec.ts`, or `.spec.tsx`
- `*_spec.{js|jsx|ts|tsx}` - Files ending with `_spec.js`, `_spec.jsx`, `_spec.ts`, or `_spec.tsx`

## Exclusions

By default, Bun test ignores:

- `node_modules` directories
- Hidden directories (those starting with a period `.`)
- Files that don't have JavaScript-like extensions (based on available loaders)

## Customizing Test Discovery

### Positional Arguments as Filters

You can filter which test files run by passing additional positional arguments to `bun test`:

```bash
$ bun test <filter> <filter> ...
```

Any test file with a path that contains one of the filters will run. These filters are simple substring matches, not glob patterns.

For example, to run all tests in a `utils` directory:

```bash
$ bun test utils
```

This would match files like `src/utils/string.test.ts` and `lib/utils/array_test.js`.

### Specifying Exact File Paths

To run a specific file in the test runner, make sure the path starts with `./` or `/` to distinguish it from a filter name:

```bash
$ bun test ./test/specific-file.test.ts
```

### Filter by Test Name

To filter tests by name rather than file path, use the `-t`/`--test-name-pattern` flag with a regex pattern:

```sh
# run all tests with "addition" in the name
$ bun test --test-name-pattern addition
```

The pattern is matched against a concatenated string of the test name prepended with the labels of all its parent describe blocks, separated by spaces. For example, a test defined as:

```js
describe("Math", () => {
  describe("operations", () => {
    test("should add correctly", () => {
      // ...
    });
  });
});
```

would be matched against the string "Math operations should add correctly".

### Changing the Root Directory

By default, Bun looks for test files starting from the current working directory. You can change this with the `root` option in your `bunfig.toml`:

```toml
[test]
root = "src" # Only scan for tests in the src directory
```

## Execution Order

Tests are run in the following order:

1. Test files are executed sequentially (not in parallel)
2. Within each file, tests run sequentially based on their definition order

@@ -56,9 +56,9 @@ The following properties and methods are implemented on mock functions.

- [x] [mockFn.mock.instances](https://jestjs.io/docs/mock-function-api#mockfnmockinstances)
- [x] [mockFn.mock.contexts](https://jestjs.io/docs/mock-function-api#mockfnmockcontexts)
- [x] [mockFn.mock.lastCall](https://jestjs.io/docs/mock-function-api#mockfnmocklastcall)
- [x] [mockFn.mockClear()](https://jestjs.io/docs/mock-function-api#mockfnmockclear) - Clears call history
- [x] [mockFn.mockReset()](https://jestjs.io/docs/mock-function-api#mockfnmockreset) - Clears call history and removes implementation
- [x] [mockFn.mockRestore()](https://jestjs.io/docs/mock-function-api#mockfnmockrestore) - Restores original implementation
- [x] [mockFn.mockImplementation(fn)](https://jestjs.io/docs/mock-function-api#mockfnmockimplementationfn)
- [x] [mockFn.mockImplementationOnce(fn)](https://jestjs.io/docs/mock-function-api#mockfnmockimplementationoncefn)
- [x] [mockFn.mockName(name)](https://jestjs.io/docs/mock-function-api#mockfnmocknamename)

@@ -197,7 +197,59 @@ After resolution, the mocked module is stored in the ES Module registry **and**

The callback function is called lazily, only if the module is imported or required. This means that you can use `mock.module()` to mock modules that don't exist yet, and it means that you can use `mock.module()` to mock modules that are imported by other modules.

### Module Mock Implementation Details

Understanding how `mock.module()` works helps you use it more effectively:

1. **Cache Interaction**: Module mocks interact with both ESM and CommonJS module caches.

2. **Lazy Evaluation**: The mock factory callback is only evaluated when the module is actually imported or required.

3. **Path Resolution**: Bun automatically resolves the module specifier as though you were doing an import, supporting:
   - Relative paths (`'./module'`)
   - Absolute paths (`'/path/to/module'`)
   - Package names (`'lodash'`)

4. **Import Timing Effects**:
   - When mocking before first import: No side effects from the original module occur
   - When mocking after import: The original module's side effects have already happened
   - For this reason, using `--preload` is recommended for mocks that need to prevent side effects

5. **Live Bindings**: Mocked ESM modules maintain live bindings, so changing the mock will update all existing imports (see the sketch below)
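
A minimal sketch of the lazy factory (the `./config` module and its `getValue` export are hypothetical):

```ts
import { mock, test, expect } from "bun:test";

// The factory below runs only when "./config" is first imported
mock.module("./config", () => {
  return {
    getValue: () => 42,
  };
});

test("uses the mocked module", async () => {
  const { getValue } = await import("./config");
  expect(getValue()).toBe(42);
});
```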

## Global Mock Functions

### Clear all mocks with `mock.clearAllMocks()`

Reset all mock function state (calls, results, etc.) without restoring their original implementation:

```ts
import { expect, mock, test } from "bun:test";

const random1 = mock(() => Math.random());
const random2 = mock(() => Math.random());

test("clearing all mocks", () => {
  random1();
  random2();

  expect(random1).toHaveBeenCalledTimes(1);
  expect(random2).toHaveBeenCalledTimes(1);

  mock.clearAllMocks();

  expect(random1).toHaveBeenCalledTimes(0);
  expect(random2).toHaveBeenCalledTimes(0);

  // Note: implementations are preserved
  expect(typeof random1()).toBe("number");
  expect(typeof random2()).toBe("number");
});
```

This resets the `.mock.calls`, `.mock.instances`, `.mock.contexts`, and `.mock.results` properties of all mocks, but unlike `mock.restore()`, it does not restore the original implementation.

### Restore all function mocks with `mock.restore()`

Instead of manually restoring each mock individually with `mockFn.mockRestore()`, restore all mocks with one command by calling `mock.restore()`. Doing so does not reset the value of modules overridden with `mock.module()`.

@@ -234,3 +286,28 @@ test('foo, bar, baz', () => {

  expect(bazSpy).toBe('baz');
});
```

## Vitest Compatibility

For added compatibility with tests written for [Vitest](https://vitest.dev/), Bun provides the `vi` global object as an alias for parts of the Jest mocking API:

```ts
import { test, expect } from "bun:test";

// Using the 'vi' alias similar to Vitest
test("vitest compatibility", () => {
  const mockFn = vi.fn(() => 42);

  mockFn();
  expect(mockFn).toHaveBeenCalled();

  // The following functions are available on the vi object:
  // vi.fn
  // vi.spyOn
  // vi.mock
  // vi.restoreAllMocks
  // vi.clearAllMocks
});
```

This makes it easier to port tests from Vitest to Bun without having to rewrite all your mocks.

108	docs/test/reporters.md	Normal file

@@ -0,0 +1,108 @@

bun test supports different output formats through reporters. This document covers both built-in reporters and how to implement your own custom reporters.

## Built-in Reporters

### Default Console Reporter

By default, bun test outputs results to the console in a human-readable format:

```sh
test/package-json-lint.test.ts:
✓ test/package.json [0.88ms]
✓ test/js/third_party/grpc-js/package.json [0.18ms]
✓ test/js/third_party/svelte/package.json [0.21ms]
✓ test/js/third_party/express/package.json [1.05ms]

 4 pass
 0 fail
 4 expect() calls
Ran 4 tests in 1.44ms
```

When a terminal doesn't support colors, the output avoids non-ASCII characters:

```sh
test/package-json-lint.test.ts:
(pass) test/package.json [0.48ms]
(pass) test/js/third_party/grpc-js/package.json [0.10ms]
(pass) test/js/third_party/svelte/package.json [0.04ms]
(pass) test/js/third_party/express/package.json [0.04ms]

 4 pass
 0 fail
 4 expect() calls
Ran 4 tests across 1 files. [0.66ms]
```

### JUnit XML Reporter

For CI/CD environments, Bun supports generating JUnit XML reports. JUnit XML is a widely-adopted format for test results that can be parsed by many CI/CD systems, including GitLab, Jenkins, and others.

#### Using the JUnit Reporter

To generate a JUnit XML report, use the `--reporter=junit` flag along with `--reporter-outfile` to specify the output file:

```sh
$ bun test --reporter=junit --reporter-outfile=./junit.xml
```

This continues to output to the console as usual while also writing the JUnit XML report to the specified path at the end of the test run.

#### Configuring via bunfig.toml

You can also configure the JUnit reporter in your `bunfig.toml` file:

```toml
[test.reporter]
junit = "path/to/junit.xml" # Output path for JUnit XML report
```

#### Environment Variables in JUnit Reports

The JUnit reporter automatically includes environment information as `<properties>` in the XML output. This can be helpful for tracking test runs in CI environments.

Specifically, it includes the following environment variables when available:

| Environment Variable                                                    | Property Name | Description            |
| ----------------------------------------------------------------------- | ------------- | ---------------------- |
| `GITHUB_RUN_ID`, `GITHUB_SERVER_URL`, `GITHUB_REPOSITORY`, `CI_JOB_URL` | `ci`          | CI build information   |
| `GITHUB_SHA`, `CI_COMMIT_SHA`, `GIT_SHA`                                | `commit`      | Git commit identifiers |
| System hostname                                                         | `hostname`    | Machine hostname       |

This makes it easier to track which environment and commit a particular test run was for.

#### Current Limitations

The JUnit reporter currently has a few limitations that will be addressed in future updates:

- `stdout` and `stderr` output from individual tests are not included in the report
- Precise timestamp fields per test case are not included

### GitHub Actions reporter

Bun test automatically detects when it's running inside GitHub Actions and emits GitHub Actions annotations to the console directly. No special configuration is needed beyond installing Bun and running `bun test`.

For a GitHub Actions workflow configuration example, see the [CI/CD integration](../cli/test.md#cicd-integration) section of the CLI documentation.

## Custom Reporters

Bun allows developers to implement custom test reporters by extending the WebKit Inspector Protocol with additional testing-specific domains.

### Inspector Protocol for Testing

To support test reporting, Bun extends the standard WebKit Inspector Protocol with two custom domains:

1. **TestReporter**: Reports test discovery, execution start, and completion events
2. **LifecycleReporter**: Reports errors and exceptions during test execution

These extensions allow you to build custom reporting tools that can receive detailed information about test execution in real-time.

### Key Events

Custom reporters can listen for these key events:

- `TestReporter.found`: Emitted when a test is discovered
- `TestReporter.start`: Emitted when a test starts running
- `TestReporter.end`: Emitted when a test completes
- `Console.messageAdded`: Emitted when console output occurs during a test
- `LifecycleReporter.error`: Emitted when an error or exception occurs

93	docs/test/runtime-behavior.md	Normal file

@@ -0,0 +1,93 @@

`bun test` is deeply integrated with Bun's runtime. This is part of what makes `bun test` fast and simple to use.

#### `$NODE_ENV` environment variable

`bun test` automatically sets `$NODE_ENV` to `"test"` unless it's already set in the environment or via .env files. This is standard behavior for most test runners and helps ensure consistent test behavior.

```ts
import { test, expect } from "bun:test";

test("NODE_ENV is set to test", () => {
  expect(process.env.NODE_ENV).toBe("test");
});
```

#### `$TZ` environment variable

By default, all `bun test` runs use UTC (`Etc/UTC`) as the time zone unless overridden by the `TZ` environment variable. This ensures consistent date and time behavior across different development environments.
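
A minimal check that the default applies:

```ts
import { test, expect } from "bun:test";

test("time zone defaults to UTC", () => {
  // Etc/UTC has a zero-minute offset year-round
  expect(new Date().getTimezoneOffset()).toBe(0);
});
```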

#### Test Timeouts

Each test has a default timeout of 5000ms (5 seconds) if not explicitly overridden. Tests that exceed this timeout will fail. This can be changed globally with the `--timeout` flag or per-test as the third parameter to the test function, as sketched below.
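
A minimal sketch of the per-test override (the URL is a placeholder):

```ts
import { test, expect } from "bun:test";

test(
  "slow network call",
  async () => {
    const res = await fetch("https://example.com");
    expect(res.ok).toBe(true);
  },
  30_000, // per-test timeout in milliseconds
);
```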

## Error Handling

### Unhandled Errors

`bun test` tracks unhandled promise rejections and errors that occur between tests. If such errors occur, the final exit code will be non-zero (specifically, the count of such errors), even if all tests pass.

This helps catch errors in asynchronous code that might otherwise go unnoticed:

```ts
import { test } from "bun:test";

test("test 1", () => {
  // This test passes
});

// This error happens outside any test
setTimeout(() => {
  throw new Error("Unhandled error");
}, 0);

test("test 2", () => {
  // This test also passes
});

// The test run will still fail with a non-zero exit code
// because of the unhandled error
```

Internally, this occurs with a higher precedence than `process.on("unhandledRejection")` or `process.on("uncaughtException")`, which makes it simpler to integrate with existing code.

## Using General CLI Flags with Tests

Several Bun CLI flags can be used with `bun test` to modify its behavior:

### Memory Usage

- `--smol`: Reduces memory usage for the test runner VM

### Debugging

- `--inspect`, `--inspect-brk`: Attaches the debugger to the test runner process

### Module Loading

- `--preload`: Runs scripts before test files (useful for global setup/mocks)
- `--define`: Sets compile-time constants
- `--loader`: Configures custom loaders
- `--tsconfig-override`: Uses a different tsconfig
- `--conditions`: Sets package.json conditions for module resolution
- `--env-file`: Loads environment variables for tests

### Installation-related Flags

- `--prefer-offline`, `--frozen-lockfile`, etc.: Affect any network requests or auto-installs during test execution

## Watch and Hot Reloading

When running `bun test` with the `--watch` flag, the test runner will watch for file changes and re-run affected tests.

The `--hot` flag provides similar functionality but is more aggressive about trying to preserve state between runs. For most test scenarios, `--watch` is the recommended option.

## Global Variables

The following globals are automatically available in test files without importing (though they can be imported from `bun:test` if preferred):

- `test`, `it`: Define tests
- `describe`: Group tests
- `expect`: Make assertions
- `beforeAll`, `beforeEach`, `afterAll`, `afterEach`: Lifecycle hooks
- `jest`: Jest global object
- `vi`: Vitest compatibility alias for common jest methods

@@ -1,3 +1,7 @@

Snapshot testing saves the output of a value and compares it against future test runs. This is particularly useful for UI components, complex objects, or any output that needs to remain consistent.

## Basic snapshots

Snapshot tests are written using the `.toMatchSnapshot()` matcher:

```ts

@@ -13,3 +17,52 @@ The first time this test is run, the argument to `expect` will be serialized and

```bash
$ bun test --update-snapshots
```

## Inline snapshots

For smaller values, you can use inline snapshots with `.toMatchInlineSnapshot()`. These snapshots are stored directly in your test file:

```ts
import { test, expect } from "bun:test";

test("inline snapshot", () => {
  // First run: snapshot will be inserted automatically
  expect({ hello: "world" }).toMatchInlineSnapshot();

  // After first run, the test file will be updated to:
  // expect({ hello: "world" }).toMatchInlineSnapshot(`
  //   {
  //     "hello": "world",
  //   }
  // `);
});
```

When you run the test, Bun automatically updates the test file itself with the generated snapshot string. This makes the tests more portable and easier to understand, since the expected output is right next to the test.

### Using inline snapshots

1. Write your test with `.toMatchInlineSnapshot()`
2. Run the test once
3. Bun automatically updates your test file with the snapshot
4. On subsequent runs, the value will be compared against the inline snapshot

Inline snapshots are particularly useful for small, simple values where it's helpful to see the expected output right in the test file.

## Error snapshots

You can also snapshot error messages using `.toThrowErrorMatchingSnapshot()` and `.toThrowErrorMatchingInlineSnapshot()`:

```ts
import { test, expect } from "bun:test";

test("error snapshot", () => {
  expect(() => {
    throw new Error("Something went wrong");
  }).toThrowErrorMatchingSnapshot();

  expect(() => {
    throw new Error("Another error");
  }).toThrowErrorMatchingInlineSnapshot();
});
```

@@ -74,9 +74,29 @@ test("it was 2020, for a moment.", () => {

});
```

## Get mocked time with `jest.now()`

When you're using mocked time (with `setSystemTime` or `useFakeTimers`), you can use `jest.now()` to get the current mocked timestamp:

```ts
import { test, expect, jest } from "bun:test";

test("get the current mocked time", () => {
  jest.useFakeTimers();
  jest.setSystemTime(new Date("2020-01-01T00:00:00.000Z"));

  expect(Date.now()).toBe(1577836800000); // Jan 1, 2020 timestamp
  expect(jest.now()).toBe(1577836800000); // Same value

  jest.useRealTimers();
});
```

This is useful when you need to access the mocked time directly without creating a new Date object.

## Set the time zone

By default, the time zone for all `bun test` runs is set to UTC (`Etc/UTC`) unless overridden. To change the time zone, pass the `$TZ` environment variable to `bun test`:

```sh
TZ=America/Los_Angeles bun test
```

@@ -78,9 +78,11 @@ test("wat", async () => {

In `bun:test`, test timeouts throw an uncatchable exception to force the test to stop running and fail. We also kill any child processes that were spawned in the test to avoid leaving behind zombie processes lurking in the background.

The default timeout for each test is 5000ms (5 seconds) if not overridden by this timeout option or `jest.setDefaultTimeout()`.
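
A minimal sketch of raising the default for a whole file via `jest.setDefaultTimeout()`:

```ts
import { jest } from "bun:test";

// Applies to every test declared in this file
jest.setDefaultTimeout(10_000); // 10 seconds
```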

### 🧟 Zombie process killer

When a test times out and processes spawned in the test via `Bun.spawn`, `Bun.spawnSync`, or `node:child_process` are not killed, they will be automatically killed and a message will be logged to the console. This prevents zombie processes from lingering in the background after timed-out tests.

## `test.skip`

@@ -125,7 +127,7 @@ fix the test.

## `test.only`

To run a particular test or suite of tests use `test.only()` or `describe.only()`.

```ts
import { test, describe } from "bun:test";

@@ -197,22 +199,121 @@ test.todoIf(macOS)("runs on posix", () => {

});
```

## `test.failing`

Use `test.failing()` when you know a test is currently failing but you want to track it and be notified when it starts passing. This inverts the test result:

- A failing test marked with `.failing()` will pass
- A passing test marked with `.failing()` will fail (with a message indicating it's now passing and should be fixed)

```ts
// This will pass because the test is failing as expected
test.failing("math is broken", () => {
  expect(0.1 + 0.2).toBe(0.3); // fails due to floating point precision
});

// This will fail with a message that the test is now passing
test.failing("fixed bug", () => {
  expect(1 + 1).toBe(2); // passes, but we expected it to fail
});
```

This is useful for tracking known bugs that you plan to fix later, or for implementing test-driven development.

## Conditional Tests for Describe Blocks

The conditional modifiers `.if()`, `.skipIf()`, and `.todoIf()` can also be applied to `describe` blocks, affecting all tests within the suite:

```ts
const isMacOS = process.platform === "darwin";

// Only runs the entire suite on macOS
describe.if(isMacOS)("macOS-specific features", () => {
  test("feature A", () => {
    // only runs on macOS
  });

  test("feature B", () => {
    // only runs on macOS
  });
});

// Skips the entire suite on Windows
describe.skipIf(process.platform === "win32")("Unix features", () => {
  test("feature C", () => {
    // skipped on Windows
  });
});

// Marks the entire suite as TODO on Linux
describe.todoIf(process.platform === "linux")("Upcoming Linux support", () => {
  test("feature D", () => {
    // marked as TODO on Linux
  });
});
```

## `test.each` and `describe.each`

To run the same test with multiple sets of data, use `test.each`. This creates a parametrized test that runs once for each test case provided.

```ts
const cases = [
  [1, 2, 3],
  [2, 3, 5],
  [3, 4, 7],
];

test.each(cases)("%p + %p should be %p", (a, b, expected) => {
  // runs once for each test case provided
  expect(a + b).toBe(expected);
});
```

You can also use `describe.each` to create a parametrized suite that runs once for each test case:

```ts
describe.each([
  [1, 2, 3],
  [3, 4, 7],
])("add(%i, %i)", (a, b, expected) => {
  test(`returns ${expected}`, () => {
    expect(a + b).toBe(expected);
  });

  test(`sum is greater than each value`, () => {
    expect(a + b).toBeGreaterThan(a);
    expect(a + b).toBeGreaterThan(b);
  });
});
```

### Argument Passing

How arguments are passed to your test function depends on the structure of your test cases:

- If a table row is an array (like `[1, 2, 3]`), each element is passed as an individual argument
- If a row is not an array (like an object), it's passed as a single argument

```ts
// Array items passed as individual arguments
test.each([
  [1, 2, 3],
  [4, 5, 9],
])("add(%i, %i) = %i", (a, b, expected) => {
  expect(a + b).toBe(expected);
});

// Object items passed as a single argument
test.each([
  { a: 1, b: 2, expected: 3 },
  { a: 4, b: 5, expected: 9 },
])("add($a, $b) = $expected", data => {
  expect(data.a + data.b).toBe(data.expected);
});
```

### Format Specifiers

There are a number of options available for formatting the test title:

{% table %}

@@ -263,6 +364,68 @@ There are a number of options available for formatting the case label depending

{% /table %}

#### Examples

```ts
// Basic specifiers
test.each([
  ["hello", 123],
  ["world", 456],
])("string: %s, number: %i", (str, num) => {
  // "string: hello, number: 123"
  // "string: world, number: 456"
});

// %p for pretty-format output
test.each([
  [{ name: "Alice" }, { a: 1, b: 2 }],
  [{ name: "Bob" }, { x: 5, y: 10 }],
])("user %p with data %p", (user, data) => {
  // "user { name: 'Alice' } with data { a: 1, b: 2 }"
  // "user { name: 'Bob' } with data { x: 5, y: 10 }"
});

// %# for index
test.each(["apple", "banana"])("fruit #%# is %s", fruit => {
  // "fruit #0 is apple"
  // "fruit #1 is banana"
});
```

## Assertion Counting

Bun supports verifying that a specific number of assertions were called during a test:

### expect.hasAssertions()

Use `expect.hasAssertions()` to verify that at least one assertion is called during a test:

```ts
test("async work calls assertions", async () => {
  expect.hasAssertions(); // Will fail if no assertions are called

  const data = await fetchData();
  expect(data).toBeDefined();
});
```

This is especially useful for async tests to ensure your assertions actually run.

### expect.assertions(count)

Use `expect.assertions(count)` to verify that a specific number of assertions are called during a test:

```ts
test("exactly two assertions", () => {
  expect.assertions(2); // Will fail if not exactly 2 assertions are called

  expect(1 + 1).toBe(2);
  expect("hello").toContain("ell");
});
```

This helps ensure all your assertions run, especially in complex async code with multiple code paths.

## Matchers

Bun implements the following matchers. Full Jest compatibility is on the roadmap; track progress [here](https://github.com/oven-sh/bun/issues/1825).

@@ -17,7 +17,7 @@ Bun supports things like top-level await, JSX, and extensioned `.ts` imports, wh

```jsonc
{
  "compilerOptions": {
    // Environment setup & latest features
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "ESNext",

@@ -35,12 +35,13 @@ Bun supports things like top-level await, JSX, and extensioned `.ts` imports, wh

    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false,
  },
}
```

@@ -1,7 +1,7 @@

const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
misctools/gdb/std_gdb_pretty_printers.py (new file, 142 lines)
@@ -0,0 +1,142 @@
# pretty printing for the standard library.
# put "source /path/to/stage2_gdb_pretty_printers.py" in ~/.gdbinit to load it automatically.
import re
import gdb.printing

# Handles both ArrayList and ArrayListUnmanaged.
class ArrayListPrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        type = self.val.type.name[len('std.array_list.'):]
        type = re.sub(r'^ArrayListAligned(Unmanaged)?\((.*),null\)$', r'ArrayList\1(\2)', type)
        return '%s of length %s, capacity %s' % (type, self.val['items']['len'], self.val['capacity'])

    def children(self):
        for i in range(self.val['items']['len']):
            item = self.val['items']['ptr'] + i
            yield ('[%d]' % i, item.dereference())

    def display_hint(self):
        return 'array'

class MultiArrayListPrinter:
    def __init__(self, val):
        self.val = val

    def child_type(self):
        (helper_fn, _) = gdb.lookup_symbol('%s.dbHelper' % self.val.type.name)
        return helper_fn.type.fields()[1].type.target()

    def to_string(self):
        type = self.val.type.name[len('std.multi_array_list.'):]
        return '%s of length %s, capacity %s' % (type, self.val['len'], self.val['capacity'])

    def slice(self):
        fields = self.child_type().fields()
        base = self.val['bytes']
        cap = self.val['capacity']
        len = self.val['len']

        if len == 0:
            return

        fields = sorted(fields, key=lambda field: field.type.alignof, reverse=True)

        for field in fields:
            ptr = base.cast(field.type.pointer()).dereference().cast(field.type.array(len - 1))
            base += field.type.sizeof * cap
            yield (field.name, ptr)

    def children(self):
        for i, (name, ptr) in enumerate(self.slice()):
            yield ('[%d]' % i, name)
            yield ('[%d]' % i, ptr)

    def display_hint(self):
        return 'map'

# Handles both HashMap and HashMapUnmanaged.
class HashMapPrinter:
    def __init__(self, val):
        self.type = val.type
        is_managed = re.search(r'^std\.hash_map\.HashMap\(', self.type.name)
        self.val = val['unmanaged'] if is_managed else val

    def header_ptr_type(self):
        (helper_fn, _) = gdb.lookup_symbol('%s.dbHelper' % self.val.type.name)
        return helper_fn.type.fields()[1].type

    def header(self):
        if self.val['metadata'] == 0:
            return None
        return (self.val['metadata'].cast(self.header_ptr_type()) - 1).dereference()

    def to_string(self):
        type = self.type.name[len('std.hash_map.'):]
        type = re.sub(r'^HashMap(Unmanaged)?\((.*),std.hash_map.AutoContext\(.*$', r'AutoHashMap\1(\2)', type)
        hdr = self.header()
        if hdr is not None:
            cap = hdr['capacity']
        else:
            cap = 0
        return '%s of length %s, capacity %s' % (type, self.val['size'], cap)

    def children(self):
        hdr = self.header()
        if hdr is None:
            return
        is_map = self.display_hint() == 'map'
        for i in range(hdr['capacity']):
            metadata = self.val['metadata'] + i
            if metadata.dereference()['used'] == 1:
                yield ('[%d]' % i, (hdr['keys'] + i).dereference())
                if is_map:
                    yield ('[%d]' % i, (hdr['values'] + i).dereference())

    def display_hint(self):
        for field in self.header_ptr_type().target().fields():
            if field.name == 'values':
                return 'map'
        return 'array'

# Handles both ArrayHashMap and ArrayHashMapUnmanaged.
class ArrayHashMapPrinter:
    def __init__(self, val):
        self.type = val.type
        is_managed = re.search(r'^std\.array_hash_map\.ArrayHashMap\(', self.type.name)
        self.val = val['unmanaged'] if is_managed else val

    def to_string(self):
        type = self.type.name[len('std.array_hash_map.'):]
        type = re.sub(r'^ArrayHashMap(Unmanaged)?\((.*),std.array_hash_map.AutoContext\(.*$', r'AutoArrayHashMap\1(\2)', type)
        return '%s of length %s' % (type, self.val['entries']['len'])

    def children(self):
        entries = MultiArrayListPrinter(self.val['entries'])
        len = self.val['entries']['len']
        fields = {}
        for name, ptr in entries.slice():
            fields[str(name)] = ptr

        for i in range(len):
            if 'key' in fields:
                yield ('[%d]' % i, fields['key'][i])
            else:
                yield ('[%d]' % i, '{}')
            if 'value' in fields:
                yield ('[%d]' % i, fields['value'][i])

    def display_hint(self):
        for name, ptr in MultiArrayListPrinter(self.val['entries']).slice():
            if name == 'value':
                return 'map'
        return 'array'

pp = gdb.printing.RegexpCollectionPrettyPrinter('Zig standard library')
pp.add_printer('ArrayList', r'^std\.array_list\.ArrayListAligned(Unmanaged)?\(.*\)$', ArrayListPrinter)
pp.add_printer('MultiArrayList', r'^std\.multi_array_list\.MultiArrayList\(.*\)$', MultiArrayListPrinter)
pp.add_printer('HashMap', r'^std\.hash_map\.HashMap(Unmanaged)?\(.*\)$', HashMapPrinter)
pp.add_printer('ArrayHashMap', r'^std\.array_hash_map\.ArrayHashMap(Unmanaged)?\(.*\)$', ArrayHashMapPrinter)
gdb.printing.register_pretty_printer(gdb.current_objfile(), pp)
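
# A minimal usage sketch (an assumption for illustration, not part of this file):
# after sourcing the script, std container values pick up these printers
# automatically, with output shaped by the to_string methods above, e.g.:
#
#   (gdb) source misctools/gdb/std_gdb_pretty_printers.py
#   (gdb) print list
#   $1 = ArrayList(u8) of length 3, capacity 8 = {...}
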
misctools/gdb/zig_gdb_pretty_printers.py (new file, 63 lines)
@@ -0,0 +1,63 @@
# pretty printing for the language.
# put "source /path/to/zig_gdb_pretty_printers.py" in ~/.gdbinit to load it automatically.
import gdb.printing


class ZigPrettyPrinter(gdb.printing.PrettyPrinter):
    def __init__(self):
        super().__init__('Zig')

    def __call__(self, val):
        tag = val.type.tag
        if tag is None:
            return None
        if tag == '[]u8':
            return StringPrinter(val)
        if tag.startswith('[]'):
            return SlicePrinter(val)
        if tag.startswith('?'):
            return OptionalPrinter(val)
        return None


class SlicePrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        return f"{self.val['len']} items at {self.val['ptr']}"

    def children(self):
        def it(val):
            for i in range(int(val['len'])):
                item = val['ptr'] + i
                yield (f'[{i}]', item.dereference())
        return it(self.val)

    def display_hint(self):
        return 'array'


class StringPrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        return self.val['ptr'].string(length=int(self.val['len']))

    def display_hint(self):
        return 'string'


class OptionalPrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        if self.val['some']:
            return self.val['data']
        else:
            return 'null'


gdb.printing.register_pretty_printer(gdb.current_objfile(), ZigPrettyPrinter())

@@ -98,7 +98,7 @@ chunks.push(`// Auto-generated file. Do not edit.
// This used to be a comptime block, but it made the build too slow.
// Compressing the completions list saves about 100 KB of binary size.
const std = @import("std");
const bun = @import("root").bun;
const bun = @import("bun");
const zstd = bun.zstd;
const Environment = bun.Environment;

@@ -1,5 +1,5 @@
const std = @import("std");
const bun = @import("root").bun;
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
@@ -12,7 +12,6 @@ const C = bun.C;
const clap = @import("../src/deps/zig-clap/clap.zig");

const URL = @import("../src/url.zig").URL;
const Headers = @import("../src/bun.js/webcore/response.zig").Headers;
const Method = @import("../src/http/method.zig").Method;
const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType;
const HeadersTuple = ColonListType(string, noop_resolver);

@@ -61,7 +61,6 @@ zig_keywords = {
    'try',
    'union',
    'unreachable',
    'usingnamespace',
    'var',
    'volatile',
    'while',

@@ -1,6 +1,6 @@
// most of this file is copy pasted from other files in misctools
const std = @import("std");
const bun = @import("root").bun;
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

@@ -1,7 +1,7 @@
const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("root").bun;
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

@@ -1,7 +1,7 @@
const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("root").bun;
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

@@ -1,7 +1,7 @@
const std = @import("std");

const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("root").bun;
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

oxlint.json
@@ -1,10 +1,10 @@
{
  "$schema": "https://raw.githubusercontent.com/oxc-project/oxc/refs/heads/main/npm/oxlint/configuration_schema.json",
  "categories": {
    "correctness": "warn" // TODO: gradually fix bugs and turn this to error
    "correctness": "error"
  },
  "rules": {
    "const-comparisons": "off", // TODO: there's a bug when comparing private identifiers. Re-enable once it's fixed.
    "const-comparisons": "error",
    "no-cond-assign": "error",
    "no-const-assign": "error",
    "no-debugger": "error",
@@ -13,12 +13,35 @@
    "no-empty-pattern": "error",
    "import/no-duplicates": "error",

    "no-useless-escape": "off" // there's a lot of these. Should be fixed eventually.
    "no-control-regex": "off",

    "no-useless-escape": "off",
    "no-this-alias": "off", // many intentional this aliases
    "triple-slash-reference": "off", // many intentional triple slash references

    // This rule is dumb.
    // Array.from is MUCH slower than new Array(size).
    "no-new-array": "off",

    // We have custom thenables. This is not a bug.
    "no-thenable": "off",

    "no-undef-init": "error",

    // We use this in some cases. The ordering is deliberate.
    "no-unsafe-finally": "off",

    // We use !!$debug to check if the debugger is enabled.
    // Boolean() is also generally slower than !!.
    "no-extra-boolean-cast": "off",

    // Eslint is not a type checker.
    "no-throw-literal": "off"
  },
  "ignorePatterns": [
    "vendor",
    "build",
    "test/snapshots/**",
    "bench",
    "bench/react-hello-world/*.js",
    "bun.lock",

@@ -29,8 +52,16 @@
    "test/js/**/*bad.js",
    "test/bundler/transpiler/decorators.test.ts", // uses `arguments` as decorator
    "test/bundler/native-plugin.test.ts", // parser doesn't handle import metadata
    "test/bundler/transpiler/with-statement-works.js" // parser doesn't allow `with` statement
    "test/bundler/transpiler/with-statement-works.js", // parser doesn't allow `with` statement
    "test/js/node/module/extensions-fixture", // these files are not meant to be linted
    "test/cli/run/module-type-fixture",
    "test/bundler/transpiler/with-statement-works.js", // parser doesn't allow `with` statement

    // TODO: fix these
    "src/js/node/http2.ts",
    "src/js/node/http.ts"
  ],

  "overrides": [
    {
      "files": ["test/**", "examples/**", "packages/bun-internal/test/runners/**"],

package.json
@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
  "version": "1.2.6",
  "version": "1.2.12",
  "workspaces": [
    "./packages/bun-types"
  ],
@@ -31,6 +31,8 @@
  },
  "scripts": {
    "build": "bun run build:debug",
    "watch": "zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "bd": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
    "build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
    "build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -DENABLE_VALGRIND=ON -B build/debug-valgrind",
    "build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
@@ -44,17 +46,21 @@
    "build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
    "build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
    "css-properties": "bun run src/css/properties/generate_properties.ts",
    "uv-posix-stubs": "bun run src/bun.js/bindings/libuv/generate_uv_posix_stubs.ts",
    "bump": "bun ./scripts/bump.ts",
    "typecheck": "tsc --noEmit && cd test && bun run typecheck",
    "fmt": "bun run prettier",
    "fmt:cpp": "bun run clang-format",
    "fmt:zig": "bun run zig-format",
    "lint": "oxlint --config oxlint.json",
    "lint": "bunx oxlint --config=oxlint.json --format=github src/js",
    "lint:fix": "oxlint --config oxlint.json --fix",
    "test": "node scripts/runner.node.mjs --exec-path ./build/debug/bun-debug",
    "test:release": "node scripts/runner.node.mjs --exec-path ./build/release/bun",
    "banned": "bun test test/internal/ban-words.test.ts",
    "zig": "vendor/zig/zig.exe",
    "zig:test": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DBUN_TEST=ON -B build/debug",
    "zig:test:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DBUN_TEST=ON -B build/release",
    "zig:test:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DBUN_TEST=ON -DZIG_OPTIMIZE=ReleaseSafe -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
    "zig:fmt": "bun run zig-format",
    "zig:check": "bun run zig build check --summary new",
    "zig:check-all": "bun run zig build check-all --summary new",
@@ -74,6 +80,7 @@
    "prettier:check": "bun run analysis:no-llvm --target prettier-check",
    "prettier:extra": "bun run analysis:no-llvm --target prettier-extra",
    "prettier:diff": "bun run analysis:no-llvm --target prettier-diff",
    "node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests "
    "node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
    "clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true"
  }
}

packages/bun-inspector-protocol/.gitignore (vendored)
@@ -1,2 +1,3 @@
index.js
protocol/*.json
protocol/v8

@@ -1 +1,354 @@
# bun-inspector-protocol

`bun-inspector-protocol` is a TypeScript library that provides a comprehensive interface for interacting with the WebKit Inspector Protocol. This package makes it easy to build debugging tools, IDE integrations, and other developer tools that communicate with Bun's JavaScript runtime.

You can use this library with Node.js or Bun.

## Overview

The WebKit Inspector Protocol is a JSON-based protocol similar to the Chrome DevTools Protocol. It allows external tools to interact with Bun's JavaScript runtime for debugging, profiling, and instrumentation purposes.

## Features

- 🌐 **WebSocket communication**: Connect to Bun's debugging endpoint via WebSockets
- 🔌 **Socket communication**: Connect via Unix/TCP sockets for local debugging
- 🔄 **Full API typing**: Complete TypeScript definitions for the protocol
- 📊 **Object preview utilities**: Format runtime objects for display
- 🔄 **Event-driven architecture**: Subscribe to specific debugging events
- 🧩 **Promise-based API**: Clean, modern async interface

## Installation

```bash
bun add bun-inspector-protocol
# npm install bun-inspector-protocol
# yarn add bun-inspector-protocol
# pnpm add bun-inspector-protocol
```

## Basic Usage

The first step is to spawn a Bun process with the inspector attached. There are a few different ways to do this.

The `--inspect-wait` flag is the easiest way to spawn a Bun process with the inspector attached.

```bash
bun --inspect-wait my-script.ts
```

This starts a WebSocket server, defaulting to port 9229. Read the process output (the URL is printed to stderr) to get the URL of the inspector:

```bash
bun --inspect-wait my-script.ts 2>&1 | grep -o '\sws://.*$'
```
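
If you are driving this from another process, the same can be done programmatically. A minimal sketch (not part of this package's API; it assumes only that Bun prints a `ws://` URL to stderr on startup):

```typescript
import { spawn } from "node:child_process";

// Hypothetical helper: spawn a script under the inspector and resolve with the ws:// URL.
function spawnWithInspector(script: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const child = spawn("bun", ["--inspect-wait", script]);
    let output = "";
    child.stderr.on("data", chunk => {
      output += chunk.toString();
      const match = output.match(/ws:\/\/\S+/);
      if (match) resolve(match[0]);
    });
    child.on("error", reject);
  });
}
```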

Once you have the URL, you can connect to the inspector using the `WebSocketInspector` class:

```typescript
import { WebSocketInspector } from "bun-inspector-protocol";

// Create a new inspector client
const inspector = new WebSocketInspector("ws://localhost:9229/ws");
```

### Connecting via WebSocket

```typescript
import { WebSocketInspector } from "bun-inspector-protocol";

// Create a new inspector client
const inspector = new WebSocketInspector("ws://localhost:9229/ws");

// Listen for connection events
inspector.on("Inspector.connected", () => {
  console.log("Connected to debugger!");
});

inspector.on("Inspector.error", error => {
  console.error("Inspector error:", error);
});

// Connect to the debugger
await inspector.start();

// Enable the Runtime domain
await inspector.send("Runtime.enable");

// Execute some code in the target context
const result = await inspector.send("Runtime.evaluate", {
  expression: "2 + 2",
  returnByValue: true,
});

console.log("Evaluation result:", result.result.value); // 4

// Close the connection
inspector.close();
```

### Connecting via Socket (for Local Debugging)

```typescript
import { NodeSocketInspector } from "bun-inspector-protocol";
import { Socket } from "node:net";

// Create a socket connection
const socket = new Socket();
socket.connect("/path/to/debug/socket");

// Create a new inspector client
const inspector = new NodeSocketInspector(socket);

// Set up event listeners and use the API as with WebSocketInspector
inspector.on("Inspector.connected", () => {
  console.log("Connected to debugger via socket!");
});

await inspector.start();
// Use the same API as WebSocketInspector from here...
```

## Event Handling

The inspector emits various events you can listen for:

```typescript
// Listen for specific protocol events
inspector.on("Debugger.scriptParsed", params => {
  console.log("Script parsed:", params.url);
});

// Listen for breakpoint hits
inspector.on("Debugger.paused", params => {
  console.log("Execution paused at:", params.callFrames[0].location);
});

// Listen for console messages
inspector.on("Runtime.consoleAPICalled", params => {
  console.log(
    "Console message:",
    params.args
      .map(arg =>
        // Use the included utility to format objects
        remoteObjectToString(arg, true),
      )
      .join(" "),
  );
});
```

## Protocol Domains

The WebKit Inspector Protocol is organized into domains that group related functionality. Based on the JavaScriptCore protocol implementation, the following domains are available:

### Console Domain

- Console message capturing and monitoring
- Support for different logging channels and levels (xml, javascript, network, etc.)
- Methods: `enable`, `disable`, `clearMessages`, `setLoggingChannelLevel`, etc.
- Events: `messageAdded`, `messageRepeatCountUpdated`, `messagesCleared`

### Debugger Domain

- Comprehensive debugging capabilities
- Setting and managing breakpoints (conditional, URL-based, symbolic)
- Execution control (pause, resume, step, etc.)
- Stack frame inspection and manipulation
- Methods: `enable`, `setBreakpoint`, `resume`, `stepInto`, `evaluateOnCallFrame`, etc.
- Events: `scriptParsed`, `breakpointResolved`, `paused`, `resumed`

### Heap Domain

- Memory management and garbage collection monitoring
- Heap snapshot creation and analysis
- Memory leak detection with tracking
- Methods: `enable`, `gc`, `snapshot`, `startTracking`, `stopTracking`
- Events: `garbageCollected`, `trackingStart`, `trackingComplete`

### Inspector Domain

- Core inspector functionality
- Methods: `enable`, `disable`, `initialized`
- Events: `evaluateForTestInFrontend`, `inspect`

### LifecycleReporter Domain

- Process lifecycle management
- Error reporting
- Methods: `enable`, `preventExit`, `stopPreventingExit`
- Events: `reload`, `error`

### Runtime Domain

- JavaScript runtime interaction
- Expression evaluation
- Object property inspection
- Promise handling
- Type profiling and control flow analysis
- Methods: `evaluate`, `callFunctionOn`, `getProperties`, `awaitPromise`, etc.
- Events: `executionContextCreated`

### ScriptProfiler Domain

- Script execution profiling
- Performance tracking
- Methods: `startTracking`, `stopTracking`
- Events: `trackingStart`, `trackingUpdate`, `trackingComplete`

### TestReporter Domain

- Test execution monitoring
- Test status reporting (pass, fail, timeout, skip, todo)
- Methods: `enable`, `disable`
- Events: `found`, `start`, `end`

Each domain has its own set of commands, events, and data types. Refer to the TypeScript definitions in this package for complete API details.
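
For example, the `TestReporter` domain can be used to observe a `bun test` run. The sketch below uses only the methods and events listed above; the event payload shapes are assumptions for illustration:

```typescript
await inspector.send("TestReporter.enable");

inspector.on("TestReporter.found", params => {
  console.log("discovered test:", params); // payload shape assumed
});

inspector.on("TestReporter.end", params => {
  console.log("test finished:", params); // reports pass/fail/timeout/skip/todo
});
```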

## Working with Remote Objects

When evaluating expressions, you'll often receive remote object references. Use the `remoteObjectToString` utility to convert these to string representations:

```typescript
import { remoteObjectToString } from "bun-inspector-protocol";

const result = await inspector.send("Runtime.evaluate", {
  expression: "{ a: 1, b: { c: 'hello' } }",
});

console.log(remoteObjectToString(result.result, true));
// Output: {a: 1, b: {c: "hello"}}
```

## Message Structure

The protocol uses a simple JSON-based message format:

### Requests

```typescript
interface Request<T> {
  id: number; // Unique request identifier
  method: string; // Domain.method name format
  params: T; // Method-specific parameters
}
```

### Responses

```typescript
interface Response<T> {
  id: number; // Matching request identifier
  result?: T; // Method-specific result (on success)
  error?: {
    // Error information (on failure)
    code?: string;
    message: string;
  };
}
```

### Events

```typescript
interface Event<T> {
  method: string; // Domain.event name format
  params: T; // Event-specific parameters
}
```
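
To make the framing concrete, here is a minimal sketch of speaking the protocol over a raw WebSocket, without this package's client classes (the URL and the `id` bookkeeping convention are assumptions; `WebSocket` is the standard global):

```typescript
const ws = new WebSocket("ws://localhost:9229/ws");
let nextId = 1;
const pending = new Map<number, (result: unknown) => void>();

ws.onmessage = event => {
  const msg = JSON.parse(String(event.data));
  if (typeof msg.id === "number") {
    // A Response: correlate it with the Request we sent.
    pending.get(msg.id)?.(msg.result);
    pending.delete(msg.id);
  } else {
    // An Event: no id, just a Domain.event name and params.
    console.log("event:", msg.method, msg.params);
  }
};

ws.onopen = () => {
  const id = nextId++;
  pending.set(id, result => console.log("evaluated:", result));
  ws.send(JSON.stringify({ id, method: "Runtime.evaluate", params: { expression: "1 + 1" } }));
};
```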

### Setting Breakpoints

```typescript
// Set a breakpoint by URL
const { breakpointId } = await inspector.send("Debugger.setBreakpointByUrl", {
  lineNumber: 42,
  url: "/app/foo.ts",
  condition: "x > 5", // Optional condition
});

// Set a breakpoint with custom actions
await inspector.send("Debugger.setBreakpoint", {
  location: { scriptId: "123", lineNumber: 10 },
  options: {
    condition: "count > 5",
    actions: [
      { type: "log", data: "Breakpoint hit!" },
      { type: "evaluate", data: "console.log('Custom breakpoint action')" },
    ],
    autoContinue: true,
  },
});

// Remove a breakpoint
await inspector.send("Debugger.removeBreakpoint", { breakpointId });
```

### Memory Profiling

```typescript
// Start heap tracking
await inspector.send("Heap.enable");
await inspector.send("Heap.startTracking");

// Listen for GC events
inspector.on("Heap.garbageCollected", ({ collection }) => {
  console.log(
    `GC completed: ${collection.type} (${collection.endTime - collection.startTime}ms)`,
  );
});

// ... perform operations to analyze ...

// Get heap snapshot
const { snapshotData } = await inspector.send("Heap.stopTracking");
// Process snapshotData to find memory leaks
```

### Script Profiling

```typescript
// Start script profiling with sampling
await inspector.send("ScriptProfiler.startTracking", { includeSamples: true });

// Listen for profiling updates
inspector.on("ScriptProfiler.trackingUpdate", event => {
  console.log("Profiling event:", event);
});

// Stop profiling to get complete data
inspector.on("ScriptProfiler.trackingComplete", data => {
  if (data.samples) {
    // Process stack traces
    console.log(`Collected ${data.samples.stackTraces.length} stack traces`);
  }
});

await inspector.send("ScriptProfiler.stopTracking");
```

## Protocol Differences from Upstream WebKit

Notable Bun-specific additions include:

- `LifecycleReporter` domain for process lifecycle management
- Enhanced `TestReporter` domain for test framework integration
- Additional utilities for script and heap profiling

## Building Tools with the Protocol

This library is ideal for building:

- IDE extensions and debuggers
- Performance monitoring tools
- Testing frameworks with runtime instrumentation
- Hot module reloading systems
- Custom REPL environments
- Profiling and optimization tools

## Full API Reference

For complete API documentation, please refer to the TypeScript definitions included in this package. The definitions provide comprehensive information about all available commands, events, and their parameters.

## License

MIT

@@ -1,4 +1,5 @@
export type * from "./src/inspector/index.js";
export * from "./src/inspector/websocket.js";
export type * from "./src/protocol/index.js";
export * from "./src/util/preview.js";
export type * from "./src/inspector/index";
export * from "./src/inspector/websocket";
export * from "./src/inspector/node-socket";
export type * from "./src/protocol/index";
export * from "./src/util/preview";

@@ -1,7 +1,19 @@
{
  "name": "bun-inspector-protocol",
  "version": "0.0.1",
  "version": "0.0.2",
  "scripts": {
    "build": "bun build --target=node --outfile=index.js --minify-syntax ./index.ts --external=ws"
  },
  "dependencies": {
    "ws": "^8.13.0"
  }
  },
  "main": "./index.js",
  "type": "module",
  "files": [
    "index.js",
    "index.ts",
    "src",
    "README.md",
    "LICENSE"
  ]
}

@@ -4,12 +4,12 @@
    "": {
      "name": "bun-plugin-svelte",
      "devDependencies": {
        "@threlte/core": "8.0.1",
        "bun-types": "canary",
        "svelte": "^5.20.4",
      },
      "peerDependencies": {
        "svelte": "^5",
        "typescript": "^5",
      },
    },
  },
@@ -26,6 +26,8 @@

    "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],

    "@threlte/core": ["@threlte/core@8.0.1", "", { "dependencies": { "mitt": "^3.0.1" }, "peerDependencies": { "svelte": ">=5", "three": ">=0.155" } }, "sha512-vy1xRQppJFNmfPTeiRQue+KmYFsbPgVhwuYXRTvVrwPeD2oYz43gxUeOpe1FACeGKxrxZykeKJF5ebVvl7gBxw=="],

    "@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="],

    "@types/node": ["@types/node@22.13.5", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg=="],
@@ -54,9 +56,11 @@

    "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="],

    "mitt": ["mitt@3.0.1", "", {}, "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw=="],

    "svelte": ["svelte@5.20.4", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "@jridgewell/sourcemap-codec": "^1.5.0", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "acorn-typescript": "^1.4.13", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^1.4.3", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-2Mo/AfObaw9zuD0u1JJ7sOVzRCGcpETEyDkLbtkcctWpCMCIyT0iz83xD8JT29SR7O4SgswuPRIDYReYF/607A=="],

    "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="],
    "three": ["three@0.174.0", "", {}, "sha512-p+WG3W6Ov74alh3geCMkGK9NWuT62ee21cV3jEnun201zodVF4tCE5aZa2U122/mkLRmhJJUQmLLW1BH00uQJQ=="],

    "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],

@@ -1,6 +1,6 @@
{
  "name": "bun-plugin-svelte",
  "version": "0.0.5",
  "version": "0.0.6",
  "description": "Official Svelte plugin for Bun",
  "repository": {
    "type": "git",

@@ -11,7 +11,11 @@ describe("SveltePlugin", () => {
    expect(() => SveltePlugin(undefined)).not.toThrow();
  });

  it.each([null, 1, "hi", {}, "Client"])("throws if forceSide is not 'client' or 'server' (%p)", (forceSide: any) => {
  it.each([1, "hi", {}, "Client"])("throws if forceSide is not 'client' or 'server' (%p)", (forceSide: any) => {
    expect(() => SveltePlugin({ forceSide })).toThrow(TypeError);
  });

  it.each([null, undefined])("forceSide may be nullish", (forceSide: any) => {
    expect(() => SveltePlugin({ forceSide })).not.toThrow();
  });
});

@@ -2,7 +2,7 @@ import { describe, beforeAll, it, expect } from "bun:test";
import type { BuildConfig } from "bun";
import type { CompileOptions } from "svelte/compiler";

import { getBaseCompileOptions, type SvelteOptions } from "./options";
import { getBaseCompileOptions, validateOptions, type SvelteOptions } from "./options";

describe("getBaseCompileOptions", () => {
  describe("when no options are provided", () => {
@@ -42,4 +42,13 @@ describe("getBaseCompileOptions", () => {
      );
    },
  );
});
}); // getBaseCompileOptions

describe("validateOptions(options)", () => {
  it.each(["", 1, null, undefined, true, false, Symbol("hi")])(
    "throws if options is not an object (%p)",
    (badOptions: any) => {
      expect(() => validateOptions(badOptions)).toThrow();
    },
  );
}); // validateOptions

@@ -2,7 +2,8 @@ import { strict as assert } from "node:assert";
import { type BuildConfig } from "bun";
import type { CompileOptions, ModuleCompileOptions } from "svelte/compiler";

export interface SvelteOptions {
type OverrideCompileOptions = Pick<CompileOptions, "customElement" | "runes" | "modernAst" | "namespace">;
export interface SvelteOptions extends Pick<CompileOptions, "runes"> {
  /**
   * Force client-side or server-side generation.
   *
@@ -20,6 +21,11 @@ export interface SvelteOptions {
   * Defaults to `true` when run via Bun's dev server, `false` otherwise.
   */
  development?: boolean;

  /**
   * Options to forward to the Svelte compiler.
   */
  compilerOptions?: OverrideCompileOptions;
}

/**
@@ -27,15 +33,24 @@ export interface SvelteOptions {
 */
export function validateOptions(options: unknown): asserts options is SvelteOptions {
  assert(options && typeof options === "object", new TypeError("bun-svelte-plugin: options must be an object"));
  if ("forceSide" in options) {
    switch (options.forceSide) {
  const opts = options as Record<keyof SvelteOptions, unknown>;

  if (opts.forceSide != null) {
    if (typeof opts.forceSide !== "string") {
      throw new TypeError("bun-svelte-plugin: forceSide must be a string, got " + typeof opts.forceSide);
    }
    switch (opts.forceSide) {
      case "client":
      case "server":
        break;
      default:
        throw new TypeError(
          `bun-svelte-plugin: forceSide must be either 'client' or 'server', got ${options.forceSide}`,
        );
        throw new TypeError(`bun-svelte-plugin: forceSide must be either 'client' or 'server', got ${opts.forceSide}`);
    }
  }

  if (opts.compilerOptions) {
    if (typeof opts.compilerOptions !== "object") {
      throw new TypeError("bun-svelte-plugin: compilerOptions must be an object");
    }
  }
}

@@ -44,7 +59,10 @@ export function validateOptions(options: unknown): asserts options is SvelteOpti
 * @internal
 */
export function getBaseCompileOptions(pluginOptions: SvelteOptions, config: Partial<BuildConfig>): CompileOptions {
  let { development = false } = pluginOptions;
  let {
    development = false,
    compilerOptions: { customElement, runes, modernAst, namespace } = kEmptyObject as OverrideCompileOptions,
  } = pluginOptions;
  const { minify = false } = config;

  const shouldMinify = Boolean(minify);
@@ -68,6 +86,10 @@ export function getBaseCompileOptions(pluginOptions: SvelteOptions, config: Part
    preserveWhitespace: !minifyWhitespace,
    preserveComments: !shouldMinify,
    dev: development,
    customElement,
    runes,
    modernAst,
    namespace,
    cssHash({ css }) {
      // same prime number seed used by svelte/compiler.
      // TODO: ensure this provides enough entropy
@@ -109,3 +131,4 @@ function generateSide(pluginOptions: SvelteOptions, config: Partial<BuildConfig>
}

export const hash = (content: string): string => Bun.hash(content, 5381).toString(36);
const kEmptyObject = Object.create(null);

@@ -24,13 +24,32 @@ afterAll(() => {
  }
});

it("hello world component", async () => {
  const res = await Bun.build({
    entrypoints: [fixturePath("foo.svelte")],
    outdir,
    plugins: [SveltePlugin()],
describe("given a hello world component", () => {
  const entrypoints = [fixturePath("foo.svelte")];
  it("when no options are provided, builds successfully", async () => {
    const res = await Bun.build({
      entrypoints,
      outdir,
      plugins: [SveltePlugin()],
    });
    expect(res.success).toBeTrue();
  });

  describe("when a custom element is provided", () => {
    let res: BuildOutput;

    beforeAll(async () => {
      res = await Bun.build({
        entrypoints,
        outdir,
        plugins: [SveltePlugin({ compilerOptions: { customElement: true } })],
      });
    });

    it("builds successfully", () => {
      expect(res.success).toBeTrue();
    });
  });
  expect(res.success).toBeTrue();
});

describe("when importing `.svelte.ts` files with ESM", () => {

packages/bun-polyfills/.gitignore (vendored, deleted, 172 lines)
@@ -1,172 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

# Logs

logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)

report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data

pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover

lib-cov

# Coverage directory used by tools like istanbul

coverage
*.lcov

# nyc test coverage

.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)

.grunt

# Bower dependency directory (https://bower.io/)

bower_components

# node-waf configuration

.lock-wscript

# Dependency directories

node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)

web_modules/

# TypeScript cache

*.tsbuildinfo

# Optional npm cache directory

.npm

# Optional eslint cache

.eslintcache

# Optional stylelint cache

.stylelintcache

# Microbundle cache

.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history

.node_repl_history

# Output of 'npm pack'

*.tgz

# Yarn Integrity file

.yarn-integrity

# dotenv environment variable files

.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)

.cache
.parcel-cache

# Next.js build output

.next
out

# Nuxt.js build / generate output

.nuxt
dist

# Gatsby files

.cache/

# Comment in the public line in if your project uses Gatsby and not Next.js

# https://nextjs.org/blog/next-9-1#public-directory-support

# public

# vuepress build output

.vuepress/dist

# vuepress v2.x temp and cache directory

.temp
.cache

# Docusaurus cache and generated files

.docusaurus

# Serverless directories

.serverless/

# FuseBox cache

.fusebox/

# DynamoDB Local files

.dynamodb/

# TernJS port file

.tern-port

# Stores VSCode versions used for testing VSCode extensions

.vscode-test

# yarn v2

.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# Misc

_*
.old
.vscode
!build

@@ -1,9 +0,0 @@
# Bun APIs Polyfills

Polyfills for Bun's JavaScript runtime APIs for use in environments outside of Bun, such as Node.js or the browser¹.

¹ **Note:** The current priority is Node.js; browser support will vary per polyfill.

## Usage

This is currently a work in progress and is not ready for general use.

@@ -1,181 +0,0 @@
{
  "lockfileVersion": 1,
  "workspaces": {
    "": {
      "name": "bun-polyfills",
      "dependencies": {
        "bun-wasm": "link:bun-wasm",
        "chalk": "^5.3.0",
        "js-md4": "^0.3.2",
        "open-editor": "^4.0.0",
        "supports-color": "^9.4.0",
        "which": "^3.0.1",
      },
      "devDependencies": {
        "@types/node": "^20.4.5",
        "@types/which": "^3.0.0",
        "bun-types": "^0.7.0",
        "copyfiles": "^2.4.1",
      },
      "peerDependencies": {
        "typescript": "^5.0.0",
      },
      "optionalPeers": [
        "typescript",
      ],
    },
  },
  "packages": {
    "@types/node": ["@types/node@20.4.5", "", {}, "sha512-rt40Nk13II9JwQBdeYqmbn2Q6IVTA5uPhvSO+JVqdXw/6/4glI6oR9ezty/A9Hg5u7JH4OmYmuQ+XvjKm0Datg=="],

    "@types/which": ["@types/which@3.0.0", "", {}, "sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ=="],

    "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],

    "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],

    "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],

    "brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="],

    "bun-types": ["bun-types@0.7.0", "", {}, "sha512-jXFiYtwSUQtD/Y3LHRWeWNwhFaUYvcO96zI7y3gSPgTq+ozxXpuTGDxABLdIKmFc672Q7Qp/OgrfJFEjg4Mnkg=="],

    "bun-wasm": ["bun-wasm@link:bun-wasm", {}],

    "chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],

    "cliui": ["cliui@7.0.4", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^7.0.0" } }, "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ=="],

    "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],

    "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],

    "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],

    "copyfiles": ["copyfiles@2.4.1", "", { "dependencies": { "glob": "^7.0.5", "minimatch": "^3.0.3", "mkdirp": "^1.0.4", "noms": "0.0.0", "through2": "^2.0.1", "untildify": "^4.0.0", "yargs": "^16.1.0" }, "bin": { "copyfiles": "copyfiles", "copyup": "copyfiles" } }, "sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg=="],

    "core-util-is": ["core-util-is@1.0.3", "", {}, "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="],

    "cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],

    "define-lazy-prop": ["define-lazy-prop@2.0.0", "", {}, "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og=="],

    "emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],

    "env-editor": ["env-editor@1.1.0", "", {}, "sha512-7AXskzN6T7Q9TFcKAGJprUbpQa4i1VsAetO9rdBqbGMGlragTziBgWt4pVYJMBWHQlLoX0buy6WFikzPH4Qjpw=="],

    "escalade": ["escalade@3.1.1", "", {}, "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="],

    "execa": ["execa@5.1.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" } }, "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg=="],

    "fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],

    "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],

    "get-stream": ["get-stream@6.0.1", "", {}, "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg=="],

    "glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="],

    "human-signals": ["human-signals@2.1.0", "", {}, "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw=="],

    "inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="],

    "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],

    "is-docker": ["is-docker@2.2.1", "", { "bin": { "is-docker": "cli.js" } }, "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ=="],

    "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],

    "is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],

    "is-wsl": ["is-wsl@2.2.0", "", { "dependencies": { "is-docker": "^2.0.0" } }, "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww=="],

    "isarray": ["isarray@0.0.1", "", {}, "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ=="],

    "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],

    "js-md4": ["js-md4@0.3.2", "", {}, "sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA=="],

    "line-column-path": ["line-column-path@3.0.0", "", { "dependencies": { "type-fest": "^2.0.0" } }, "sha512-Atocnm7Wr9nuvAn97yEPQa3pcQI5eLQGBz+m6iTb+CVw+IOzYB9MrYK7jI7BfC9ISnT4Fu0eiwhAScV//rp4Hw=="],

    "merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],

    "mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="],

    "minimatch": ["minimatch@3.0.8", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q=="],

    "mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],

    "noms": ["noms@0.0.0", "", { "dependencies": { "inherits": "^2.0.1", "readable-stream": "~1.0.31" } }, "sha512-lNDU9VJaOPxUmXcLb+HQFeUgQQPtMI24Gt6hgfuMHRJgMRHMF/qZ4HJD3GDru4sSw9IQl2jPjAYnQrdIeLbwow=="],

    "npm-run-path": ["npm-run-path@4.0.1", "", { "dependencies": { "path-key": "^3.0.0" } }, "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw=="],

    "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],

    "onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg=="],

    "open": ["open@8.4.2", "", { "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", "is-wsl": "^2.2.0" } }, "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ=="],

    "open-editor": ["open-editor@4.0.0", "", { "dependencies": { "env-editor": "^1.0.0", "execa": "^5.1.1", "line-column-path": "^3.0.0", "open": "^8.4.0" } }, "sha512-5mKZ98iFdkivozt5XTCOspoKbL3wtYu6oOoVxfWQ0qUX9NYsK8pdkHE7VUHXr+CwyC3nf6mV0S5FPsMS65innw=="],

    "path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="],

    "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],

    "process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="],

    "readable-stream": ["readable-stream@1.0.34", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", "isarray": "0.0.1", "string_decoder": "~0.10.x" } }, "sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg=="],

    "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],

    "safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],

    "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],

    "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],

    "signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="],

    "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],

    "string_decoder": ["string_decoder@0.10.31", "", {}, "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ=="],

    "strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],

    "strip-final-newline": ["strip-final-newline@2.0.0", "", {}, "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA=="],

    "supports-color": ["supports-color@9.4.0", "", {}, "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw=="],

    "through2": ["through2@2.0.5", "", { "dependencies": { "readable-stream": "~2.3.6", "xtend": "~4.0.1" } }, "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ=="],

    "type-fest": ["type-fest@2.19.0", "", {}, "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA=="],

    "untildify": ["untildify@4.0.0", "", {}, "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw=="],

    "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],

    "which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],

    "wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],

    "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],

    "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],

    "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],

    "yargs": ["yargs@16.2.0", "", { "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" } }, "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw=="],

    "yargs-parser": ["yargs-parser@20.2.9", "", {}, "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w=="],

    "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],

    "glob/minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],

    "through2/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],

    "through2/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="],

    "through2/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
  }
}

@@ -1,110 +0,0 @@
// @ts-check
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const { instance } = /** @type {ZighashInstance} */(
    await WebAssembly.instantiate(
        fs.readFileSync(path.join(path.dirname(fileURLToPath(import.meta.url)), 'zighash.wasm')),
        {
            env: {
                /** @param {any} x */
                print(x) { console.log(x); },
            },
        }
    )
);
const exports = instance.exports;
const mem = exports.memory;
const memview = {
    get u8() { return new Uint8Array(mem.buffer); },
    get u16() { return new Uint16Array(mem.buffer); },
    get u32() { return new Uint32Array(mem.buffer); },
    get u64() { return new BigUint64Array(mem.buffer); },
    get i8() { return new Int8Array(mem.buffer); },
    get i16() { return new Int16Array(mem.buffer); },
    get i32() { return new Int32Array(mem.buffer); },
    get i64() { return new BigInt64Array(mem.buffer); },
    get f32() { return new Float32Array(mem.buffer); },
    get f64() { return new Float64Array(mem.buffer); },
};

const nullptr = { ptr: -1, size: 0 };
const encoder = new TextEncoder();

const allocBuffer = (
    /** @type {ArrayBufferView | ArrayBuffer | SharedArrayBuffer} */ buf,
    /** @type {boolean=} */ nullTerminate = false,
) => {
    const size = buf.byteLength + +nullTerminate;
    if (size === 0) return nullptr;
    const ptr = exports.alloc(size);
    if (ptr === -1) throw new Error('WASM memory allocation failed');
    const u8heap = memview.u8;
    u8heap.set(new Uint8Array(ArrayBuffer.isView(buf) ? buf.buffer : buf), ptr);
    if (nullTerminate) u8heap[ptr + buf.byteLength] = 0;
    return { ptr, size };
};
const allocString = (
    /** @type {string} */ str,
    /** @type {boolean=} */ nullTerminate = true,
) => {
    const strbuf = encoder.encode(str);
    return allocBuffer(strbuf, nullTerminate);
};

/** @type {JSSeededHash64Function} */
export function wyhash(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.wyhash(ptr, size, seed));
}
/** @type {JSHash32Function} */
export function adler32(input = '') {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.adler32(ptr, size) >>> 0;
}
/** @type {JSHash32Function} */
export function crc32(input = '') {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.crc32(ptr, size) >>> 0;
}
/** @type {JSHash32Function} */
export function cityhash32(input = '') {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.cityhash32(ptr, size) >>> 0;
}
/** @type {JSSeededHash64Function} */
export function cityhash64(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.cityhash64(ptr, size, seed));
}
/** @type {JSSeededHash32Function} */
export function xxhash32(input = '', seed = 0) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.xxhash32(ptr, size, seed);
}
/** @type {JSSeededHash64Function} */
export function xxhash64(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.xxhash64(ptr, size, seed));
}
/** @type {JSSeededHash64Function} */
export function xxhash3(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.xxhash3(ptr, size, seed));
}
/** @type {JSSeededHash32Function} */
export function murmur32v3(input = '', seed = 0) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.murmur32v3(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
}
/** @type {JSSeededHash32Function} */
export function murmur32v2(input = '', seed = 0) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return exports.murmur32v2(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
}
/** @type {JSSeededHash64Function} */
export function murmur64v2(input = '', seed = 0n) {
    const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
    return BigInt.asUintN(64, exports.murmur64v2(ptr, size, seed));
}
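For orientation, a minimal usage sketch of the wrapper above (not part of the deleted file; the relative import path is an assumption):

// hash-example.mjs, assumed to sit next to the index.mjs shown above
import { wyhash, crc32 } from './index.mjs';

// Strings are UTF-8 encoded and copied into WASM memory via allocString().
console.log(wyhash('hello world', 42n)); // 64-bit unsigned hash as a BigInt
// Binary input is passed through allocBuffer() unchanged.
console.log(crc32(new TextEncoder().encode('hello world'))); // 32-bit unsigned integer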
@@ -1,10 +0,0 @@
{
    "private": true,
    "type": "module",
    "name": "zighash-wasm",
    "module": "index.mjs",
    "scripts": {
        "build": "bun run clean && zig build-lib src/main.zig --name zighash -target wasm32-freestanding -dynamic -rdynamic -OReleaseSmall",
        "clean": "rm -f *.wasm *.o"
    }
}
@@ -1,73 +0,0 @@
const std = @import("std");

extern fn print(*const u8) void;

comptime {
    std.debug.assert(@alignOf(u16) >= 2);
    std.debug.assert(@alignOf(u32) >= 4);
    std.debug.assert(@alignOf(u64) >= 8);
    std.debug.assert(@alignOf(i16) >= 2);
    std.debug.assert(@alignOf(i32) >= 4);
    std.debug.assert(@alignOf(i64) >= 8);
}

export fn alloc(size: u32) [*]const u8 {
    const slice = std.heap.wasm_allocator.alloc(u8, size) catch @panic("wasm failed to allocate memory");
    return slice.ptr;
}

export fn wyhash(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Wyhash.hash(seed, input);
}
export fn adler32(input_ptr: [*]const u8, input_size: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Adler32.hash(input);
}
export fn crc32(input_ptr: [*]const u8, input_size: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Crc32.hash(input);
}
export fn cityhash32(input_ptr: [*]const u8, input_size: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.CityHash32.hash(input);
}
export fn cityhash64(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.CityHash64.hashWithSeed(input, seed);
}
export fn xxhash32(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.XxHash32.hash(seed, input);
}
export fn xxhash64(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.XxHash64.hash(seed, input);
}
export fn xxhash3(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.XxHash3.hash(seed, input);
}
export fn murmur32v3(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Murmur3_32.hashWithSeed(input, seed);
}
export fn murmur32v2(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Murmur2_32.hashWithSeed(input, seed);
}
export fn murmur64v2(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
    const input: []const u8 = input_ptr[0..input_size];
    defer std.heap.wasm_allocator.free(input);
    return std.hash.Murmur2_64.hashWithSeed(input, seed);
}
packages/bun-polyfills/lib/zighash/types.d.ts (vendored)
@@ -1,28 +0,0 @@
type WasmHash32Function = (input_ptr: number, input_size: number) => number;
type WasmHash64Function = (input_ptr: number, input_size: number) => bigint;
type WasmSeededHash32Function = (input_ptr: number, input_size: number, seed: number) => number;
type WasmSeededHash64Function = (input_ptr: number, input_size: number, seed: bigint) => bigint;
type JSHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
type JSHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => bigint;
type JSSeededHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
type JSSeededHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;

type ZighashInstance = WebAssembly.WebAssemblyInstantiatedSource & {
    instance: {
        exports: {
            memory: WebAssembly.Memory,
            alloc(size: number): number,
            wyhash: WasmSeededHash64Function,
            adler32: WasmHash32Function,
            crc32: WasmHash32Function,
            cityhash32: WasmHash32Function,
            cityhash64: WasmSeededHash64Function,
            xxhash32: WasmSeededHash32Function,
            xxhash64: WasmSeededHash64Function,
            xxhash3: WasmSeededHash64Function,
            murmur32v3: WasmSeededHash32Function,
            murmur32v2: WasmSeededHash32Function,
            murmur64v2: WasmSeededHash64Function,
        };
    };
}
Binary file not shown.
@@ -1,30 +0,0 @@
{
    "type": "module",
    "name": "bun-polyfills",
    "module": "src/index.ts",
    "devDependencies": {
        "@types/node": "^20.4.5",
        "@types/which": "^3.0.0",
        "bun-types": "^0.7.0",
        "copyfiles": "^2.4.1"
    },
    "peerDependencies": {
        "typescript": "^5.0.0"
    },
    "scripts": {
        "node": "node --enable-source-maps --import ./dist/src/repl.js",
        "clean": "rm -rf dist",
        "preprocess": "bun tools/updateversions.ts",
        "build": "bun run clean && bun run preprocess && bunx tsc && bunx copyfiles \"./lib/**/*.wasm\" dist",
        "build/wasm": "bun run build/zighash",
        "build/zighash": "cd lib/zighash && bun run build && cd ../.."
    },
    "dependencies": {
        "bun-wasm": "link:bun-wasm",
        "chalk": "^5.3.0",
        "js-md4": "^0.3.2",
        "open-editor": "^4.0.0",
        "supports-color": "^9.4.0",
        "which": "^3.0.1"
    }
}
@@ -1,31 +0,0 @@
//? Implements: Red colored console.error from Bun
//if (Bun.enableANSIColors) {
// const RED = '\x1B[31m' as const;
// const RESET = '\x1B[0m' as const;
// const consoleError = console.error;
// console.error = (...args) => {
// if (typeof args[0] === 'string') args[0] = RED + args[0];
// consoleError(...args, RESET);
// };
//}

//? Implements: for await (const line of console) { ... }
console[Symbol.asyncIterator] = async function* () {
    while (true) yield await new Promise(resolve => {
        process.stdin.on('data', (data: Buffer | string) => {
            const str = data.toString('utf-8').replaceAll(/[\r\n]+/g, '');
            resolve(str);
        });
    });
} satisfies Console[typeof Symbol.asyncIterator];

//? Implements: Bun-exclusive console function
console.write = ((...data) => {
    const str = data.map(val => {
        if (val instanceof ArrayBuffer) val = new TextDecoder('utf-8').decode(val);
        else if (typeof val === 'object') val = new TextDecoder('utf-8').decode(val.buffer);
        return val;
    }).join('');
    process.stdout.write(str);
    return new TextEncoder().encode(str).byteLength; // TextEncoder takes no constructor arguments; it is always UTF-8
}) satisfies Console['write'];
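A short usage sketch of the two console features polyfilled above (not part of the deleted file; assumes the module is imported for its side effects in an ESM context with top-level await):

import './console.js'; // apply the polyfills

for await (const line of console) { // yields one line per stdin chunk, CR/LF stripped
    if (line === 'exit') break;
    console.write('you said: ', line, '\n'); // returns the number of bytes written
}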
@@ -1,32 +0,0 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { createRequire } from 'node:module';

// Without an ESM loader, this polyfill is impossible to apply automatically,
// due to the per-module nature of import.meta. In order to use this polyfill,
// you must import it in every module that uses import.meta, and call it with
// the import.meta object as the argument. When the polyfills are integrated
// with bun build, this could be done automatically by the build process at
// the top of every module file bundled.

export default function polyfillImportMeta(metaIn: ImportMeta) {
    const require2 = createRequire(metaIn.url);
    const metapath = fileURLToPath(metaIn.url);
    const meta: ImportMeta = {
        url: metaIn.url,
        main: metapath === process.argv[1],
        path: metapath,
        dir: path.dirname(metapath),
        file: path.basename(metapath),
        require: require2,
        resolve: metaIn.resolve,
        resolveSync(id: string, parent?: string) {
            return require2.resolve(id, {
                paths: typeof parent === 'string' ? [
                    path.resolve(parent.startsWith('file://') ? fileURLToPath(parent) : parent, '..')
                ] : undefined,
            });
        },
    };
    Object.assign(metaIn, meta);
}
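As the comment block above notes, the polyfill has to be applied manually in each module; a sketch of the intended call pattern (file name and import path are hypothetical):

// some-module.ts (hypothetical consumer)
import polyfillImportMeta from './global/importmeta.js';

polyfillImportMeta(import.meta); // mutates this module's import.meta in place
// Bun-style fields are now available alongside the standard ones:
console.log(import.meta.path, import.meta.dir, import.meta.main);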
@@ -1,45 +0,0 @@
import { version } from '../modules/bun.js';
import './console.js';
import './process.js';
import os from 'node:os';

//? NodeJS Blob doesn't implement Blob.json(), so we need to polyfill it.
Blob.prototype.json = async function json<T>(this: Blob): Promise<T> {
    try {
        return JSON.parse(await this.text()) as T;
    } catch (err) {
        Error.captureStackTrace(err as Error, json);
        throw err;
    }
};

//? navigator global object polyfill
Reflect.set(globalThis, 'navigator', {
    userAgent: `Bun/${version}`,
    hardwareConcurrency: os.cpus().length,
});

//? method only available in Bun
// this isn't quite accurate, but it shouldn't break anything and is currently here just for matching bun and node types
const ReadableStreamDefaultReaderPrototype = Object.getPrototypeOf(new ReadableStream().getReader());
Reflect.set(
    ReadableStreamDefaultReaderPrototype, 'readMany',
    function readMany(this: ReadableStreamDefaultReader): Promise<ReadableStreamDefaultReadManyResult<any>> {
        return new Promise((resolve, reject) => {
            const result: ReadableStreamDefaultReadManyResult<any> = {
                value: [],
                size: 0,
                done: true
            };
            this.read().then(({ done, value }) => {
                if (done) resolve(result);
                else {
                    result.value.push(value);
                    result.size = value.length;
                    result.done = false;
                    resolve(result);
                }
            }, reject);
        });
    }
);
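A sketch of what the patched reader looks like from the caller's side (not part of the deleted file; readMany simply batches whatever a single read() returns):

const reader = new Blob(['abc']).stream().getReader() as ReadableStreamDefaultReader<Uint8Array> & {
    readMany(): Promise<{ value: Uint8Array[], size: number, done: boolean }>;
};
const { value, size, done } = await reader.readMany();
console.log(value.length, size, done); // one chunk, its byte length, false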
@@ -1,19 +0,0 @@

if (typeof process === 'object' && process !== null) {
    // process polyfills (node-only)
    Reflect.set(process, 'isBun', 1 satisfies Process['isBun']);
    Reflect.set(process, 'browser', false satisfies Process['browser']);

    const NULL_VERSION = '0'.repeat(39) + '1';
    process.versions.bun = '0.7.1' satisfies Process['versions'][string]; // TODO: This can probably be fetched from somewhere in the repo
    process.versions.webkit = NULL_VERSION satisfies Process['versions'][string];
    process.versions.mimalloc = NULL_VERSION satisfies Process['versions'][string];
    process.versions.libarchive = NULL_VERSION satisfies Process['versions'][string];
    process.versions.picohttpparser = NULL_VERSION satisfies Process['versions'][string];
    process.versions.boringssl = NULL_VERSION satisfies Process['versions'][string];
    process.versions.zig = '0.10.0' satisfies Process['versions'][string];
    Reflect.set(process, 'revision', NULL_VERSION satisfies Process['revision']);

    // Doesn't work on Windows sadly
    //Object.defineProperty(process, 'execPath', { value: path.resolve(root, 'cli.js') });
}
@@ -1,3 +0,0 @@
export * from './modules/bun.js';
export * as default from './modules/bun.js';
import './global/index.js';
@@ -1,518 +0,0 @@
import type {
    BunPlugin, PluginConstraints, PluginBuilder, OnLoadCallback, OnResolveCallback, HeapSnapshot,
    EditorOptions, SpawnOptions, Subprocess, SyncSubprocess, FileBlob as BunFileBlob, ArrayBufferView, Hash
} from 'bun';
import { TextDecoderStream } from 'node:stream/web';
import { NotImplementedError, type SystemError } from '../utils/errors.js';
import { streamToBuffer, isArrayBufferView, isFileBlob, isOptions } from '../utils/misc.js';
import dnsPolyfill from './bun/dns.js';
import { FileSink } from './bun/filesink.js';
import {
    bunHash, bunHashProto,
    MD4 as MD4Polyfill, MD5 as MD5Polyfill,
    SHA1 as SHA1Polyfill, SHA224 as SHA224Polyfill,
    SHA256 as SHA256Polyfill, SHA384 as SHA384Polyfill,
    SHA512 as SHA512Polyfill, SHA512_256 as SHA512_256Polyfill
} from './bun/hashes.js';
import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js';
import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';
import TranspilerImpl from './bun/transpiler.js';
import fs from 'node:fs';
import v8 from 'node:v8';
import path from 'node:path';
import util from 'node:util';
import zlib from 'node:zlib';
import streams from 'node:stream';
import workers from 'node:worker_threads';
import chp, { type ChildProcess, type StdioOptions, type SpawnSyncReturns } from 'node:child_process';
import { fileURLToPath as fileURLToPathNode, pathToFileURL as pathToFileURLNode } from 'node:url';
import npm_which from 'which';
import openEditor from 'open-editor';

export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satisfies typeof Bun.main;

//? These are automatically updated on build by tools/updateversions.ts, do not edit manually.
export const version = '0.7.4' satisfies typeof Bun.version;
export const revision = '56816a3ec845a4b9fc40ade34dbe5c0033433d51' satisfies typeof Bun.revision;

export const gc = (globalThis.gc ? (() => (globalThis.gc!(), process.memoryUsage().heapUsed)) : (() => {
    const err = new Error('[bun-polyfills] Garbage collection polyfills are only available when Node.js is run with the --expose-gc flag.');
    Error.captureStackTrace(err, gc);
    throw err;
})) satisfies typeof Bun.gc;

//getter(bun, 'cwd', proc.cwd); //! Can't named export a getter
export const origin = '' satisfies typeof Bun.origin;
// @ts-expect-error ---
export const stdin = new NodeJSStreamFileBlob(process.stdin) satisfies typeof Bun.stdin;
// @ts-expect-error ---
export const stdout = new NodeJSStreamFileBlob(process.stdout) satisfies typeof Bun.stdout;
// @ts-expect-error ---
export const stderr = new NodeJSStreamFileBlob(process.stderr) satisfies typeof Bun.stderr;
export const argv = [process.argv0, ...process.execArgv, ...process.argv.slice(1)] satisfies typeof Bun.argv;
export const env = process.env satisfies typeof Bun.env;
Object.setPrototypeOf(env, {
    toJSON(this: typeof env) { return { ...this }; }
});
// @ts-expect-error supports-color types are unbelievably bad
export const enableANSIColors = (await import('supports-color')).createSupportsColor().hasBasic satisfies typeof Bun.enableANSIColors;

export const hash = bunHash satisfies typeof Bun.hash;
Object.setPrototypeOf(hash, bunHashProto satisfies Hash);

export const unsafe = {
    gcAggressionLevel: () => 0, //! no-op
    arrayBufferToString: (buf) => new TextDecoder().decode(buf),
    segfault: () => {
        const segfault = new Error();
        segfault.name = 'SegfaultTest';
        segfault.message = '';
        console.error(segfault);
        process.exit(1);
    }
} satisfies typeof Bun['unsafe'];

export const Transpiler = TranspilerImpl satisfies typeof Bun.Transpiler;

export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1;
export const MD5 = MD5Polyfill satisfies typeof Bun.MD5;
export const MD4 = MD4Polyfill satisfies typeof Bun.MD4;
export const SHA224 = SHA224Polyfill satisfies typeof Bun.SHA224;
export const SHA512 = SHA512Polyfill satisfies typeof Bun.SHA512;
export const SHA384 = SHA384Polyfill satisfies typeof Bun.SHA384;
export const SHA256 = SHA256Polyfill satisfies typeof Bun.SHA256;
export const SHA512_256 = SHA512_256Polyfill satisfies typeof Bun.SHA512_256;

export const indexOfLine = ((data, offset) => {
    if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) data = new Uint8Array(data);
    if (data instanceof DataView || !(data instanceof Uint8Array)) data = new Uint8Array(data.buffer);
    return data.indexOf(10, offset);
}) satisfies typeof Bun.indexOfLine;

const peek_ = function peek(promise: Parameters<typeof Bun.peek>[0]) {
    throw new NotImplementedError('Bun.peek', peek);
};
peek_.status = (promise => {
    return util.inspect(promise).includes('<pending>') ? 'pending'
        : util.inspect(promise).includes('<rejected>') ? 'rejected' : 'fulfilled';
}) satisfies typeof Bun.peek.status;
export const peek = peek_ satisfies typeof Bun.peek;
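The status polyfill above infers promise state from util.inspect's rendering; a sketch of what that yields (not part of the deleted file):

const pending = new Promise(() => {}); // never settles
console.log(peek.status(pending)); // 'pending'
console.log(peek.status(Promise.resolve(1))); // 'fulfilled'
const failed = Promise.reject(new Error('x'));
failed.catch(() => {}); // suppress the unhandled-rejection warning
console.log(peek.status(failed)); // 'rejected'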
export const sleep = (ms => {
    return new Promise(r => setTimeout(r, ms instanceof Date ? ms.valueOf() - Date.now() : ms));
}) satisfies typeof Bun.sleep;
export const sleepSync = (ms => {
    if (ms < 0) throw new TypeError('argument to sleepSync must not be negative');
    Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
}) satisfies typeof Bun.sleepSync;

//? This is not 1:1 matching, but no one should be relying on the exact output of this function anyway.
//? To quote Node's inspect itself: "The output of util.inspect() may change at any time and should not be depended upon programmatically."
//? Of course in Node's case some didn't listen and relied on the output of util.inspect() anyway, but hopefully this won't happen with this one.
export const inspect = ((arg: any): string => util.inspect(arg, {
    breakLength: Infinity,
    colors: false,
    compact: true,
    customInspect: false,
    depth: Infinity,
    getters: true,
    maxArrayLength: Infinity,
    maxStringLength: Infinity,
    showHidden: false,
    showProxy: false,
    sorted: false
})) satisfies typeof Bun.inspect;

export const resolveSync = ((id: string, parent: string) => import.meta.resolveSync(id, parent)) satisfies typeof Bun.resolveSync;
export const resolve = (async (id: string, parent: string) => import.meta.resolve!(id, parent)) satisfies typeof Bun.resolve;

//? Yes, this is faster than new Uint8Array(Buffer.allocUnsafe(size).buffer) by about 2.5x in Node.js
export const allocUnsafe = ((size: number) => new Uint8Array(size)) satisfies typeof Bun.allocUnsafe;

export const generateHeapSnapshot = (async (): Promise<HeapSnapshot> => {
    process.emitWarning('The polyfill for Bun.generateHeapSnapshot is asynchronous, unlike the original which is synchronous.', {
        type: 'BunPolyfillWarning',
        code: 'BUN_POLYFILLS_ASYNC_GENERATE_HEAP_SNAPSHOT',
        detail: 'This is due to v8.getHeapSnapshot() returning a stream in Node.js. This is not a bug, but a limitation of the polyfill.'
    });
    const raw = (await streamToBuffer(v8.getHeapSnapshot())).toString('utf8');
    const json = JSON.parse(raw) as V8HeapSnapshot;
    return {
        version: 2,
        type: 'Inspector',
        nodes: json.nodes,
        edges: json.edges,
        edgeTypes: json.snapshot.meta.edge_types.flat(),
        edgeNames: json.snapshot.meta.edge_fields.flat(),
        nodeClassNames: json.snapshot.meta.node_types.flat(),
    };
    // @ts-expect-error Refer to the above emitWarning call
}) satisfies typeof Bun.generateHeapSnapshot;

//! This is a no-op in Node.js, as there is no way to shrink the V8 heap from JS as far as I know.
export const shrink = (() => void 0) satisfies typeof Bun.shrink;

export const openInEditor = ((file: string, opts?: EditorOptions) => {
    const target = [{ file: path.resolve(process.cwd(), file), line: opts?.line, column: opts?.column }] as const;
    if (opts?.editor) openEditor(target, opts);
    else openEditor(target, { editor: process.env.TERM_PROGRAM ?? process.env.VISUAL ?? process.env.EDITOR ?? 'vscode' });
}) satisfies typeof Bun.openInEditor;

export const serve = (() => { throw new NotImplementedError('Bun.serve', serve); }) satisfies typeof Bun.serve;

export const file = ((path: string | URL | Uint8Array | ArrayBufferLike | number, options?: BlobPropertyBag): BunFileBlob => {
    if (typeof path === 'object') throw new NotImplementedError('Bun.file with typed array', file);
    return new FileBlob(path, options);
}) satisfies typeof Bun.file;

export const write = (async (dest: BunFileBlob | PathLike, input: string | Blob | TypedArray | ArrayBufferLike | BlobPart[] | Response | BunFileBlob): ReturnType<typeof Bun.write> => {
    if (!isFileBlob(dest)) {
        let fd: number;
        if (dest instanceof ArrayBuffer || dest instanceof SharedArrayBuffer) fd = fs.openSync(Buffer.from(dest), 'w');
        // bun-types thought it'd be funny to make their own URL definition which doesn't match the correct URL definition...
        else if (typeof dest === 'string' || dest instanceof URL) fd = fs.openSync(dest as import('url').URL, 'w');
        else fd = fs.openSync(Buffer.from(dest.buffer), 'w');

        if (input instanceof Response || input instanceof Blob) {
            const data = await input.text();
            return new Promise((resolve, reject) => {
                fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
            });
        }
        if (Array.isArray(input)) {
            const data = await new Blob(input).text();
            return new Promise((resolve, reject) => {
                fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
            });
        }
        return new Promise((resolve, reject) => {
            if (typeof input === 'string') return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
            if (input instanceof Uint8Array) return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
            if (input instanceof ArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
            if (input instanceof SharedArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
            return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
        });
    } else {
        const writer = dest.writer();
        if (Array.isArray(input)) input = new Blob(input);
        if (input instanceof Blob || input instanceof Response) return writer.write(await input.arrayBuffer());
        if (input instanceof ArrayBuffer || input instanceof SharedArrayBuffer || ArrayBuffer.isView(input)) return writer.write(input);
        if (typeof input === 'string') return writer.write(input);
        else return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
    }
}) satisfies typeof Bun.write;

export const sha = SHA512_256.hash satisfies typeof Bun.sha;

export const nanoseconds = (() => Math.trunc(performance.now() * 1000000)) satisfies typeof Bun.nanoseconds;

//? This just prints out some debug stuff in console, and as the name implies no one should be using it.
//? But, just in case someone does, we'll make it a no-op function so at least the program doesn't crash trying to run the function.
export const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = (() => {
    console.warn('DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump called.');
}) satisfies unknown; /* undocumented */

export const gzipSync = zlib.gzipSync satisfies typeof Bun.gzipSync;
export const deflateSync = zlib.deflateSync satisfies typeof Bun.deflateSync;
export const gunzipSync = zlib.gunzipSync satisfies typeof Bun.gunzipSync;
export const inflateSync = zlib.inflateSync satisfies typeof Bun.inflateSync;

export const which = ((cmd: string, options) => {
    const opts: npm_which.Options = { all: false, nothrow: true };
    if (options?.PATH) opts.path = options.PATH;
    const result = npm_which.sync(cmd, opts) as string | null;
    if (!result || !options?.cwd) return result;
    if (path.normalize(result).includes(path.normalize(options.cwd))) return result;
    else return null;
}) satisfies typeof Bun.which;

export const spawn = ((...args) => {
    let cmd: string;
    let argv: string[];
    let opts: SpawnOptions.OptionsObject;

    if (args[0] instanceof Array) {
        cmd = args[0][0];
        argv = args[0].slice(1);
        opts = isOptions(args[1]) ? args[1] : {};
    } else {
        cmd = args[0].cmd[0];
        argv = args[0].cmd.slice(1);
        opts = args[0];
        Reflect.deleteProperty(opts, 'cmd');
    }

    let stdio: StdioOptions = [];
    opts.stdio ??= [undefined, undefined, undefined];
    if (opts.stdin) opts.stdio[0] = opts.stdin;
    if (opts.stdout) opts.stdio[1] = opts.stdout;
    if (opts.stderr) opts.stdio[2] = opts.stderr;
    for (let i = 1; i < 3; i++) { // this intentionally skips stdin
        let std = opts.stdio[i];
        if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
        else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
        else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
        else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
        else stdio[i] = std;
    }
    let stdinSrc: typeof opts.stdio[0] = null;
    if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
        stdinSrc = opts.stdio[0];
        stdio[0] = 'pipe';
    }

    const subp = chp.spawn(cmd, argv, {
        cwd: opts.cwd ?? process.cwd(),
        // why is this set to (string | number) on env values...
        env: { ...(opts.env as Record<string, string> ?? process.env) },
        stdio
    }) as unknown as Subprocess;
    const subpAsNode = subp as unknown as ChildProcess;
    const stdstreams = [subpAsNode.stdin, subpAsNode.stdout, subpAsNode.stderr] as const;
    if (subpAsNode.stdout) {
        const rstream = streams.Readable.toWeb(subpAsNode.stdout) as ReadableStream;
        Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
            void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails it's already closed */ });
            return this;
        });
        (<Mutable<Subprocess>>subp).stdout = rstream;
    }
    if (subpAsNode.stderr) {
        const rstream = streams.Readable.toWeb(subpAsNode.stderr) as ReadableStream;
        Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
            void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails it's already closed */ });
            return this;
        });
        (<Mutable<Subprocess>>subp).stderr = rstream;
    }
    let internalStdinStream: streams.Writable;
    if (subpAsNode.stdin) {
        const wstream = subpAsNode.stdin;
        Reflect.set(wstream, 'destroy', function (this: NodeJS.WritableStream, err?: Error) {
            void this.end(); /* if it fails it's already closed */
            return this;
        });
        internalStdinStream = wstream;
        (<Mutable<Subprocess>>subp).stdin = new FileSink(wstream);
    }
    Object.defineProperty(subp, 'readable', { get(this: Subprocess) { return this.stdout; } });
    Object.defineProperty(subp, 'exited', {
        value: new Promise((resolve, reject) => {
            subpAsNode.once('exit', (code) => {
                stdstreams[0]?.destroy();
                stdstreams[1]?.destroy();
                stdstreams[2]?.destroy();
                subp.kill();
                subp.unref();
                subpAsNode.disconnect?.();
                subpAsNode.removeAllListeners();
                resolve(code);
            });
        })
    });
    if (stdinSrc) subpAsNode.once('spawn', () => {
        const stdinWeb = streams.Writable.toWeb(internalStdinStream);
        if (isArrayBufferView(stdinSrc)) stdinSrc = new Blob([stdinSrc]);
        if (stdinSrc instanceof Blob) void stdinSrc.stream().pipeTo(stdinWeb);
        else if (stdinSrc instanceof Response || stdinSrc instanceof Request) void stdinSrc.body!.pipeTo(stdinWeb);
        else if (typeof stdinSrc === 'number') void fs.createReadStream('', { fd: stdinSrc }).pipe(internalStdinStream);
        else void stdinSrc;
    });
    // change the error stack to point to the spawn() call instead of internal Node.js callback stuff
    const here = new Error('§__PLACEHOLDER__§');
    Error.captureStackTrace(here, spawn);
    if (!subpAsNode.pid) return subpAsNode.once('error', (err: SystemError) => {
        err.message = (err.syscall ?? `spawn ${err.path ?? ''}`) + ' ' + (err.code ?? String(err.errno ?? ''));
        err.stack = here.stack!.replace('§__PLACEHOLDER__§', err.message);
        throw err;
    }) as unknown as Subprocess;
    return subp;
}) satisfies typeof Bun.spawn;
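Both call shapes the spawn polyfill accepts, mirroring Bun.spawn (not part of the deleted file):

const proc = spawn(['echo', 'hi'], { stdout: 'pipe' });
console.log(await proc.exited); // resolves with the exit code on 'exit'

// Options-object form; a Blob stdin is piped in once the child spawns.
const proc2 = spawn({ cmd: ['cat'], stdin: new Blob(['piped input']) });
console.log(await proc2.exited);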
export const spawnSync = ((...args): SyncSubprocess => {
    let cmd: string;
    let argv: string[];
    let opts: SpawnOptions.OptionsObject;
    if (args[0] instanceof Array) {
        cmd = args[0][0];
        argv = args[0].slice(1);
        opts = isOptions(args[1]) ? args[1] : {};
    } else {
        cmd = args[0].cmd[0];
        argv = args[0].cmd.slice(1);
        opts = args[0];
        Reflect.deleteProperty(opts, 'cmd');
    }

    let stdio: StdioOptions = [];
    opts.stdio ??= [undefined, undefined, undefined];
    if (opts.stdin) opts.stdio[0] = opts.stdin;
    if (opts.stdout) opts.stdio[1] = opts.stdout;
    if (opts.stderr) opts.stdio[2] = opts.stderr;
    for (let i = 1; i < 3; i++) { // this intentionally skips stdin
        let std = opts.stdio[i];
        if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
        else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
        else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
        else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
        else stdio[i] = std;
    }
    let input: ArrayBufferView | string | undefined;
    if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
        stdio[0] = null; // will be overridden by chp.spawnSync "input" option
        //! Due to the fully async nature of Blobs, Responses and Requests,
        //! we can't synchronously get the data out of them here in userland.
        if (opts.stdio[0] instanceof Blob) throw new NotImplementedError('Bun.spawnSync({ stdin: <Blob> })', spawnSync);
        else if (opts.stdio[0] instanceof Response || opts.stdio[0] instanceof Request) throw new NotImplementedError('Bun.spawnSync({ stdin: <Response|Request> })', spawnSync);
        else if (typeof opts.stdio[0] === 'number') input = fs.readFileSync(opts.stdio[0]);
        else input = opts.stdio[0] as ArrayBufferView;
    }

    const subp = chp.spawnSync(cmd, argv, {
        cwd: opts.cwd ?? process.cwd(),
        env: { ...(opts.env as Record<string, string> ?? process.env) },
        stdio, input
    }) as unknown as SyncSubprocess;
    const subpAsNode = subp as unknown as SpawnSyncReturns<Buffer>;
    if (subpAsNode.error) throw subpAsNode.error;

    subp.exitCode = subpAsNode.status ?? NaN; //! not sure what Bun would return here (child killed by signal)
    subp.success = subp.exitCode === 0;
    return subp;
}) satisfies typeof Bun.spawnSync;

export const escapeHTML = ((input) => {
    const str = String(input);
    let out = '';
    for (let i = 0; i < str.length; i++) {
        const char = str[i];
        switch (char) {
            case '"': out += '&quot;'; break;
            case "'": out += '&#x27;'; break;
            case '&': out += '&amp;'; break;
            case '<': out += '&lt;'; break;
            case '>': out += '&gt;'; break;
            default: out += char;
        }
    }
    return out;
}) satisfies typeof Bun.escapeHTML;

export const readableStreamToArrayBuffer = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): ArrayBuffer | Promise<ArrayBuffer> => {
    return (async () => {
        const sink = new ArrayBufferSink();
        const reader = stream.getReader();
        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            sink.write(value);
        }
        return sink.end() as ArrayBuffer;
    })();
}) satisfies typeof Bun.readableStreamToArrayBuffer;

export const readableStreamToBytes = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): Uint8Array | Promise<Uint8Array> => {
    return (async () => {
        const sink = new ArrayBufferSink();
        sink.start({ asUint8Array: true });
        const reader = stream.getReader();
        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            sink.write(value);
        }
        return sink.end() as Uint8Array;
    })();
}) satisfies typeof Bun.readableStreamToBytes;

export const readableStreamToText = (async (stream: ReadableStream<ArrayBufferView | ArrayBuffer>) => {
    let result = '';
    const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
    while (true) {
        const { done, value } = await reader.read();
        //! for some reason "done" isn't being set to true so this is just infinitely looping at the moment... sigh
        if (done || !value || !value?.length) break;
        result += value;
    }
    return result;
}) satisfies typeof Bun.readableStreamToText;
export const readableStreamToBlob = (async (stream: ReadableStream<any>) => {
    const parts = await readableStreamToArray(stream);
    return new Blob(parts as BlobPart[]);
}) satisfies typeof Bun.readableStreamToBlob;
export const readableStreamToArray = (async <T = unknown>(stream: ReadableStream<T>) => {
    const array = new Array<T>();
    const reader = stream.getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done || !value || !(<any>value)?.length) break;
        array.push(value as unknown as T);
    }
    return array;
}) satisfies typeof Bun.readableStreamToArray;
export const readableStreamToJSON = (async <T = unknown>(stream: ReadableStream<Uint8Array>) => {
    const text = await readableStreamToText(stream);
    try {
        return JSON.parse(text) as T;
    } catch (err) {
        Error.captureStackTrace(err as Error, readableStreamToJSON);
        throw err;
    }
}) satisfies typeof Bun.readableStreamToJSON;
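A round-trip sketch for the stream helpers above (not part of the deleted file; assumes the early-break workaround in readableStreamToText terminates the loop as written):

const stream = new Blob(['{"ok":true}']).stream();
const parsed = await readableStreamToJSON<{ ok: boolean }>(stream);
console.log(parsed.ok); // true: the stream is decoded to text, then JSON.parse'd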
export const concatArrayBuffers = ((buffers, maxLength = Infinity, asUint8Array = false) => {
    let size = 0;
    for (const chunk of buffers) size += chunk.byteLength;
    size = Math.min(size, maxLength);
    const buffer = new ArrayBuffer(size);
    const view = new Uint8Array(buffer);
    let offset = 0;
    for (const chunk of buffers) {
        if (offset > size) break;
        view.set(new Uint8Array(chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer ? chunk : chunk.buffer), offset);
        offset += chunk.byteLength;
    }
    if (asUint8Array) return view;
    return buffer;
}) satisfies typeof Bun.concatArrayBuffers;

export const ArrayBufferSink = ArrayBufferSinkPolyfill satisfies typeof Bun.ArrayBufferSink;

export const pathToFileURL = pathToFileURLNode satisfies typeof Bun.pathToFileURL;
export const fileURLToPath = fileURLToPathNode satisfies typeof Bun.fileURLToPath;

export const dns = dnsPolyfill satisfies typeof Bun.dns;

export const isMainThread = workers.isMainThread satisfies typeof Bun.isMainThread;

//! It may be possible to implement plugins with Node ESM loaders, but it would take some effort and have some caveats.
//! For now, we'll simply make all calls to Bun.plugin no-op, such that manual implementation of an external ESM loader is possible,
//! but without needing to strip out all Bun.plugin calls from the source code for running on Node.
const dummyPluginBuilder: PluginBuilder = ({
    onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void {
        return; // stubbed
    },
    onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void {
        return; // stubbed
    },
    config: { plugins: [], entrypoints: [] },
}) satisfies PluginBuilder;
const bunPlugin = <T extends BunPlugin>(options: T) => options?.setup?.(dummyPluginBuilder) as ReturnType<T['setup']>;
bunPlugin.clearAll = () => void 0;
export const plugin = bunPlugin satisfies typeof Bun.plugin;
/*void plugin({
    name: 'test',
    target: 'bun',
    setup(builder) {
        if (builder.target !== 'bun') return;
        builder.onResolve({ namespace: 'sample', filter: /.+/ }, args => {
            args.importer;
            if (args.path === 'foo') return { namespace: 'redirect', path: 'bar' };
            else return;
        });
        builder.onLoad({ namespace: 'sample', filter: /.+/ }, args => {
            args.path;
            return { loader: 'object', exports: { foo: 'bar' }, contents: 'void 0;' };
        });
    }
});*/
@@ -1,67 +0,0 @@
type BunArrayBufferSink = InstanceType<typeof Bun.ArrayBufferSink>;

export class ArrayBufferSink implements BunArrayBufferSink {
    #started: boolean = true;
    #closed: boolean = false;
    #offset: number = 0;
    #stream: boolean = false;
    #asUint8: boolean = false;
    #buffer: Buffer = Buffer.allocUnsafe(8192);

    get sinkId(): number { return 0; } //? undocumented, seems to always return 0

    #ASSERT_NOT_CLOSED(caller: AnyFunction): void {
        if (!this.#closed) return;
        const err = new TypeError('Expected Sink');
        Error.captureStackTrace(err, caller);
        throw err;
    }

    start({ asUint8Array = false, highWaterMark = 8192, stream = false }: Parameters<BunArrayBufferSink['start']>[0] = {}): void {
        this.#ASSERT_NOT_CLOSED(this.start);
        this.#started = true;
        this.#offset = 0;
        this.#stream = stream;
        this.#asUint8 = asUint8Array;
        if (highWaterMark !== this.#buffer.byteLength) this.#buffer = Buffer.allocUnsafe(highWaterMark);
    }

    write(data: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
        this.#ASSERT_NOT_CLOSED(this.write);
        if (typeof data === 'string') data = new TextEncoder().encode(data);
        const writedata = (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) ? new Uint8Array(data) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
        // this is very bad API design to not throw an error here, but it's what Bun does
        if (!this.#started) return writedata.byteLength;

        if (this.#offset + writedata.byteLength > this.#buffer.byteLength) {
            const newLength = Math.ceil((this.#offset + writedata.byteLength) / 1024) * 1024;
            const newBuffer = Buffer.allocUnsafe(newLength);
            newBuffer.set(this.#buffer);
            this.#buffer = newBuffer;
        }
        this.#buffer.set(writedata, this.#offset);
        this.#offset += writedata.byteLength;
        return writedata.byteLength;
    }

    flush(): number | Uint8Array | ArrayBuffer {
        this.#ASSERT_NOT_CLOSED(this.flush);
        if (!this.#stream) return 0; //! brokenly seems to always return 0 and do nothing
        const flushed = new Uint8Array(this.#offset);
        flushed.set(this.#buffer.subarray(0, this.#offset)); // faster than Buffer.copy or Uint8Array.slice
        this.#offset = 0;
        return this.#asUint8 ? flushed : flushed.buffer as ArrayBuffer;
    }

    end(): Uint8Array | ArrayBuffer {
        this.#ASSERT_NOT_CLOSED(this.end);
        const stream = this.#stream;
        this.#stream = true; // force flush() to return the data
        const buffer = this.flush() as Uint8Array | ArrayBuffer;
        this.#stream = stream;
        this.#started = false;
        return buffer;
    }

    close(): void { this.#closed = true; } //? undocumented
}
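Usage sketch for the sink above in stream mode (not part of the deleted file):

const sink = new ArrayBufferSink();
sink.start({ asUint8Array: true, stream: true });
sink.write('hello ');
sink.write(new TextEncoder().encode('world'));
const chunk = sink.flush() as Uint8Array; // drains everything buffered so far
console.log(new TextDecoder().decode(chunk)); // "hello world"
sink.end(); // returns any remaining bytes (none here) and deactivates the sink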
@@ -1,21 +0,0 @@
import dns from 'node:dns';

const dnsObj: typeof Bun.dns = {
    async lookup(hostname, options) {
        const opts = { verbatim: true, all: true } as dns.LookupOptions;
        if (options?.family) {
            if (options.family === 'IPv4') opts.family = 4;
            else if (options.family === 'IPv6') opts.family = 6;
            else if (options.family === 'any') opts.family = 0;
            else opts.family = options.family;
        }
        if (options?.flags) opts.hints = options.flags;
        const records = ((await dns.promises.resolveAny(hostname))
            .filter(r => r.type === 'A' || r.type === 'AAAA') as (dns.AnyARecord | dns.AnyAaaaRecord)[])
            .map(r => ({ address: r.address, family: r.type === 'A' ? 4 as const : 6 as const, ttl: r.ttl }));
        return records;
    },
    // This has more properties but they're not documented on bun-types yet, oh well.
};

export default dnsObj;
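Usage sketch for the lookup polyfill above (not part of the deleted file; note it resolves via dns.promises.resolveAny and keeps only A/AAAA records):

const records = await dnsObj.lookup('example.com', { family: 'IPv4' });
for (const r of records) console.log(r.address, r.family, r.ttl);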
@@ -1,195 +0,0 @@
|
||||
import fs from 'node:fs';
|
||||
import tty from 'node:tty';
|
||||
import streams from 'node:stream';
|
||||
import { ReadableStream as NodeWebReadableStream } from 'node:stream/web';
|
||||
import { FileSink } from './filesink.js';
|
||||
import { SystemError } from '../../utils/errors.js';
|
||||
import type { FileBlob as BunFileBlob, FileSink as BunFileSink } from 'bun';
|
||||
|
||||
type NodeJSStream = streams.Readable | streams.Writable;
|
||||
|
||||
function NodeJSReadableStreamToBlob(stream: NodeJS.ReadableStream | NodeJS.ReadWriteStream, iostream: boolean = false, type?: string): Promise<Blob> {
|
||||
if (stream.isPaused()) stream.resume();
|
||||
return new Promise((resolve, reject) => {
|
||||
const chunks: any[] = [];
|
||||
const dataHandler = (chunk: any) => { chunks.push(chunk); if (iostream) end(); };
|
||||
const end = () => {
|
||||
resolve(new Blob(chunks, type != null ? { type } : undefined));
|
||||
stream.off('data', dataHandler);
|
||||
stream.off('end', end);
|
||||
stream.pause();
|
||||
};
|
||||
stream.once('data', dataHandler).once('end', end);
|
||||
//.once('error', reject); Bun waits to error on actual operations on the stream, therefore so will we.
|
||||
});
|
||||
}
|
||||
|
||||
export const NodeJSStreamFileBlob = class FileBlob extends Blob {
|
||||
constructor(source: NodeJSStream, slice: [number?, number?] = [undefined, undefined], type = 'application/octet-stream') {
|
||||
super(undefined, { type });
|
||||
Reflect.deleteProperty(this, 'size');
|
||||
if (source === process.stdout || source === process.stdin || source === process.stderr) {
|
||||
this.#iostream = true;
|
||||
}
|
||||
this.#readable = source instanceof streams.Readable && !(source instanceof tty.WriteStream);
|
||||
this.#source = source;
|
||||
this.#slice = slice;
|
||||
this.#size = Infinity;
|
||||
}
|
||||
readonly #iostream: boolean = false;
|
||||
readonly #readable: boolean;
|
||||
readonly #source: NodeJSStream;
|
||||
readonly #slice: [number?, number?];
|
||||
#size: number;
|
||||
|
||||
slice(begin?: number, end?: number, contentType?: string): Blob;
|
||||
slice(begin?: number, contentType?: string): Blob;
|
||||
slice(contentType?: string): Blob;
|
||||
slice(beginOrType?: number | string, endOrType?: number | string, contentType: string = this.type): Blob {
|
||||
if (typeof beginOrType === 'string') return new FileBlob(this.#source, this.#slice, beginOrType);
|
||||
if (typeof endOrType === 'string') return new FileBlob(this.#source, [beginOrType, undefined], endOrType);
|
||||
return new FileBlob(this.#source, [beginOrType, endOrType], contentType);
|
||||
}
|
||||
|
||||
override stream(): ReadableStream<Uint8Array> {
|
||||
// This makes no sense but Bun does it so we will too
|
||||
if (!this.#readable) return new ReadableStream();
|
||||
return streams.Readable.toWeb(this.#source as streams.Readable);
|
||||
}
|
||||
|
||||
#blobStackFn: AnyFunction = this.#getBlob;
|
||||
|
||||
async #getBlob(): Promise<Blob> {
|
||||
if (!this.#readable) {
|
||||
const err = new SystemError(-1, 'read');
|
||||
Error.captureStackTrace(err, this.#blobStackFn);
|
||||
throw err;
|
||||
}
|
||||
const blob = (await NodeJSReadableStreamToBlob(this.#source as streams.Readable, this.#iostream)).slice(...this.#slice);
|
||||
this.#size = blob.size;
|
||||
return blob;
|
||||
}
|
||||
|
||||
override async text(): Promise<string> {
|
||||
if (this.#blobStackFn !== this.json) this.#blobStackFn = this.text;
|
||||
return (await this.#getBlob()).text();
|
||||
}
|
||||
override async arrayBuffer(): Promise<ArrayBuffer> {
|
||||
this.#blobStackFn = this.arrayBuffer;
|
||||
return (await this.#getBlob()).arrayBuffer();
|
||||
}
|
||||
override async json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> {
|
||||
this.#blobStackFn = this.json;
|
||||
return JSON.parse(await this.text()) as Promise<TJSONReturnType>;
|
||||
}
|
||||
|
||||
override get size(): number { return this.#size; }
|
||||
override set size(_) { return; }
|
||||
};
|
||||
|
||||
export class FileBlob extends Blob implements BunFileBlob {
|
||||
constructor(fdOrPath: number | string, opts: BlobPropertyBag = {}) {
|
||||
opts.type ??= 'application/octet-stream'; // TODO: Get MIME type from file extension
|
||||
super(undefined, opts);
|
||||
Reflect.deleteProperty(this, 'size');
|
||||
if (Reflect.get(opts, '__data')) this.#data = Reflect.get(opts, '__data') as Blob;
|
||||
const slice = Reflect.get(opts, '__slice') as [number?, number?] | undefined;
|
||||
if (slice) {
|
||||
slice[0] &&= slice[0] | 0; // int cast
|
||||
slice[1] &&= slice[1] | 0; // int cast
|
||||
this.#slice = slice;
|
||||
slice[0] ??= 0;
|
||||
if (typeof slice[1] === 'undefined') {
|
||||
if (slice[0] < 0) this.#sliceSize = -slice[0];
|
||||
}
|
||||
else if (slice[0] < 0 && slice[1] < 0) this.#sliceSize = -(slice[0] - slice[1]);
|
||||
else if (slice[0] >= 0 && slice[1] >= 0) this.#sliceSize = slice[1] - slice[0];
|
||||
}
|
||||
if (typeof fdOrPath === 'string') try {
|
||||
this.#fd = fs.openSync(fdOrPath, 'r+');
|
||||
} catch (err) {
|
||||
this.#error = err as SystemError;
|
||||
}
|
||||
else {
|
||||
this.#fd = fdOrPath;
|
||||
this.#error = Reflect.get(opts, '__error') as SystemError | undefined;
|
||||
}
|
||||
if (!this.#error) {
|
||||
const rstream = fs.createReadStream('', { fd: this.#fd, start: this.#slice[0], end: this.#slice[1] });
|
||||
this.#readable = streams.Readable.toWeb(rstream);
|
||||
}
|
||||
}
|
||||
readonly #readable?: NodeWebReadableStream;
|
||||
readonly #error?: SystemError;
|
||||
readonly #slice: [number?, number?] = [];
|
||||
readonly #sliceSize: number = 0;
|
||||
readonly #fd: number = NaN;
|
||||
#data?: Blob;
|
||||
|
||||
#read() {
|
||||
if (this.#error) throw this.#error;
|
||||
const read = fs.readFileSync(this.#fd);
|
||||
this.#data = new Blob([read.subarray(...this.#slice)], { type: this.type });
|
||||
}
|
||||
|
||||
//! Bun 0.2 seems to return undefined for this, this might not be accurate or it's broken on Bun's side
|
||||
get readable(): ReadableStream<any> {
|
||||
if (this.#error) throw this.#error;
|
||||
return this.#readable! as ReadableStream;
|
||||
}
|
||||
|
||||
get lastModified(): number {
|
||||
if (this.#error) throw this.#error;
|
||||
return fs.fstatSync(this.#fd).mtimeMs;
|
||||
}
|
||||
|
||||
async exists(): Promise<boolean> {
|
||||
return !this.#error;
|
||||
}
|
||||
|
||||
writer(): BunFileSink {
|
||||
if (this.#error) throw this.#error;
|
||||
return new FileSink(this.#fd);
|
||||
}
|
||||
|
||||
// TODO: what's contentType?
|
||||
override slice(begin?: number | string, end?: number | string, contentType?: string): FileBlob {
|
||||
if (typeof begin === 'string') {
|
||||
contentType = begin;
|
||||
begin = undefined;
|
||||
}
|
||||
if (typeof end === 'string') {
|
||||
contentType = end;
|
||||
end = undefined;
|
||||
}
|
||||
return new FileBlob(this.#fd, {
|
||||
__error: this.#error,
|
||||
__slice: [begin, end],
|
||||
__data: this.#data?.slice(begin, end),
|
||||
} as BlobPropertyBag);
|
||||
}
    override arrayBuffer(): Promise<ArrayBuffer> {
        if (!this.#data) this.#read();
        // Wrap in a fresh Blob to reuse its async reader methods.
        return new Blob([this.#data ?? '']).arrayBuffer();
    }
    override text(): Promise<string> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).text();
    }
    override json(): Promise<any>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> | Promise<any> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).json();
    }
    override stream(): NodeJS.ReadableStream;
    override stream(): ReadableStream<Uint8Array>;
    override stream(): ReadableStream<Uint8Array> | NodeJS.ReadableStream {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).stream();
    }

    override get size(): number {
        return this.#data?.size ?? (this.#sliceSize || 0);
    }
}
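
A minimal usage sketch of the class above (hypothetical, not part of the diff; it assumes the constructor's `opts` parameter is a `BlobPropertyBag`-shaped object, as the cast in `slice()` suggests, and that `./hello.txt` exists):

    import fs from 'node:fs';
    // top-level await (ESM)
    const blob = new FileBlob(fs.openSync('./hello.txt', 'r+'), {} as BlobPropertyBag);
    console.log(await blob.text());      // whole file as a string
    console.log(blob.slice(0, 5).size);  // 5 -- precomputed from the slice bounds
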
@@ -1,87 +0,0 @@
import fs from 'node:fs';
import { SystemError } from '../../utils/errors.js';
import type { FileSink as BunFileSink } from 'bun';

export class FileSink implements BunFileSink {
    constructor(fdOrPathOrStream: number | string | NodeJS.WritableStream) {
        if (typeof fdOrPathOrStream === 'string') try {
            this.#fd = fs.openSync(fdOrPathOrStream, 'a+');
            fs.ftruncateSync(this.#fd, 0);
        } catch (err) {
            throw err as SystemError;
        }
        else if (typeof fdOrPathOrStream === 'number') {
            this.#fd = fdOrPathOrStream; // hope this fd is writable
            fs.ftruncateSync(this.#fd, 0);
        }
        else {
            this.#stream = fdOrPathOrStream;
        }
    }
    #fd: number = NaN;
    #stream: NodeJS.WritableStream | undefined;
    #closed: boolean = false;
    #writtenSinceFlush: number = 0;
    #totalWritten: number = 0;

    start(options?: { highWaterMark?: number | undefined; } | undefined): void {
        return; // TODO
    }

    ref(): void {
        return; // TODO
    }

    unref(): void {
        return; // TODO
    }

    write(chunk: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
        if (this.#closed) {
            return typeof chunk === 'string' ? chunk.length : chunk.byteLength;
        }
        if (this.#stream) {
            let data;
            if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) data = new Uint8Array(chunk);
            // Respect the view's offset/length instead of grabbing its whole backing buffer.
            else if (!(chunk instanceof Uint8Array) && typeof chunk !== 'string') data = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
            else data = chunk;
            this.#stream.write(data);
            const written = typeof data === 'string' ? data.length : data.byteLength;
            this.#totalWritten += written;
            return written;
        }
        if (typeof chunk === 'string') {
            fs.appendFileSync(this.#fd, chunk, 'utf8');
            this.#writtenSinceFlush += chunk.length;
            return chunk.length;
        }
        if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) fs.appendFileSync(this.#fd, new Uint8Array(chunk));
        else fs.appendFileSync(this.#fd, new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength));
        this.#writtenSinceFlush += chunk.byteLength;
        return chunk.byteLength;
    }

    //! Flushing after writing to a closed FileSink segfaults in Bun, but there's no need to replicate that behavior here.
    flush(): number | Promise<number> {
        if (this.#closed) return 0;
        // no-op because this is a synchronous implementation
        const written = this.#writtenSinceFlush;
        this.#writtenSinceFlush = 0;
        return written;
    }

    //! It's unclear what Bun does with this error parameter; it is currently ignored.
    end(error?: Error): number | Promise<number> {
        if (this.#closed) return this.#totalWritten;
        const flushed = this.flush();
        if (this.#stream) {
            this.#stream.end();
            this.#closed = true;
            return flushed;
        }
        this.#totalWritten = fs.fstatSync(this.#fd).size;
        fs.closeSync(this.#fd);
        this.#closed = true;
        return flushed;
    }
}
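
A minimal usage sketch of FileSink (hypothetical, not part of the diff):

    const sink = new FileSink('./out.log');          // opens 'a+' and truncates to 0
    sink.write('hello ');                            // 6 (string length)
    sink.write(new TextEncoder().encode('world\n')); // 6 (byte length)
    console.log(sink.flush());                       // 12 -- bytes written since the last flush
    sink.end();                                      // flushes any remainder and closes the fd
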
@@ -1,188 +0,0 @@
import type { CryptoHashInterface, DigestEncoding, Hash } from 'bun';
import nodecrypto from 'node:crypto';
import os from 'node:os';
import md4, { Md4 } from 'js-md4';
import { wyhash, adler32, crc32, cityhash32, cityhash64, xxhash32, xxhash64, xxhash3, murmur32v3, murmur64v2, murmur32v2 } from '../../../lib/zighash/index.mjs';

export const bunHash = ((data, seed = 0): bigint => wyhash(data, BigInt(seed))) as typeof Bun.hash;
export const bunHashProto: Hash = {
    wyhash(data, seed = 0n) { return wyhash(data, seed); },
    adler32(data) { return adler32(data); },
    crc32(data) { return crc32(data); },
    cityHash32(data) { return cityhash32(data); },
    cityHash64(data, seed = 0n) { return cityhash64(data, seed); },
    xxHash32(data, seed = 0) { return xxhash32(data, seed); },
    xxHash64(data, seed = 0n) { return xxhash64(data, seed); },
    xxHash3(data, seed = 0n) { return xxhash3(data, seed); },
    murmur32v3(data, seed = 0) { return murmur32v3(data, seed); },
    murmur32v2(data, seed = 0) { return murmur32v2(data, seed); },
    murmur64v2(data, seed = 0n) { return murmur64v2(data, seed); },
};
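
// A quick sketch of the exported surface (hypothetical inputs; per the defaults
// above, 64-bit variants take bigint seeds and 32-bit variants take numbers):
//   bunHash('hello world')           // wyhash of the string, seed 0
//   bunHashProto.xxHash64('hello')   // seed defaults to 0n
//   bunHashProto.crc32('hello')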

type HashImpl = {
    digest(): Buffer;
    digest(encoding: nodecrypto.BinaryToTextEncoding): string;
    update(data: nodecrypto.BinaryLike): HashImpl;
    update(data: string, inputEncoding: nodecrypto.Encoding): HashImpl;
};
abstract class BaseHash<T> implements CryptoHashInterface<T> {
    readonly #hash: HashImpl | null;
    constructor(algorithm: string | HashImpl) {
        if (typeof algorithm === 'string') this.#hash = nodecrypto.createHash(algorithm);
        // If no preset algorithm is given, expect the subclass to fully implement its own.
        else this.#hash = algorithm;
    }
    update(data: StringOrBuffer) {
        if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) this.#hash!.update(new Uint8Array(data));
        else this.#hash!.update(data);
        return this as unknown as T; // is there any good way to do this without asserting?
    }
    digest(encoding: DigestEncoding): string;
    digest(hashInto?: TypedArray): TypedArray;
    digest(encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        if (typeof encodingOrHashInto === 'string') {
            const encoded = this.#hash!.digest(encodingOrHashInto);
            // you'd think node would throw an error if the encoding is invalid, but nope!
            // instead it silently returns as if you passed no encoding and gives a Buffer...
            if (Buffer.isBuffer(encoded)) throw new TypeError(`Unknown encoding: "${encodingOrHashInto}"`);
            else return encoded;
        }
        const digested = this.#hash!.digest();
        if (encodingOrHashInto === undefined) return new Uint8Array(digested.buffer, digested.byteOffset, digested.byteLength);
        if (encodingOrHashInto.byteLength < this.byteLength) throw new TypeError(`TypedArray must be at least ${this.byteLength} bytes`);
        if (encodingOrHashInto instanceof BigInt64Array || encodingOrHashInto instanceof BigUint64Array) {
            // avoid checking endianness for every loop iteration
            const endianAwareInsert = os.endianness() === 'LE'
                ? (arr: string[], j: number, num: string) => arr[7 - j] = num
                : (arr: string[], j: number, num: string) => arr[j] = num;

            for (let i = 0; i < digested.byteLength; i += 8) {
                const bigintStrArr = ['', '', '', '', '', '', '', ''];
                for (let j = 0; j < 8; j++) {
                    const byte = digested[i + j];
                    if (byte === undefined) break;
                    endianAwareInsert(bigintStrArr, j, byte.toString(16).padStart(2, '0'));
                }
                encodingOrHashInto[i / 8] = BigInt(`0x${bigintStrArr.join('')}`);
            }
        } else {
            const HashIntoTypedArray = encodingOrHashInto.constructor as TypedArrayConstructor;
            // this will work as long as all hash classes have a byteLength that is a multiple of 4 bytes
            encodingOrHashInto.set(new HashIntoTypedArray(digested.buffer, digested.byteOffset, digested.byteLength / HashIntoTypedArray.BYTES_PER_ELEMENT));
        }
        return encodingOrHashInto;
    }
    // Placeholder overloads; every concrete subclass overrides these statics.
    static hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { return ''; }
    static readonly byteLength: number;
    abstract readonly byteLength: number;
}
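
// Worked example of the BigInt64Array/BigUint64Array branch in digest() above:
// for digest bytes 01 02 03 04 05 06 07 08, the hex pairs are placed in reverse
// order on little-endian hosts, so hashInto[0] = 0x0807060504030201n there (and
// 0x0102030405060708n on big-endian hosts). Either way the typed array's backing
// buffer ends up holding the raw digest bytes in their original order.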

export class SHA1 extends BaseHash<SHA1> {
    constructor() { super('sha1'); }
    static override readonly byteLength = 20;
    override readonly byteLength = 20;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
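// Both call styles supported by the class above, using the well-known SHA-1
// test vector for 'hello':
//   new SHA1().update('hello').digest('hex') // 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
//   SHA1.hash('hello', 'hex')                // same digest, one-shot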
export class MD4 extends BaseHash<MD4> {
    constructor() { //! Not supported by nodecrypto
        const hash = md4.create() as unknown as Omit<Md4, 'toString'> & { _update: Md4['update'] };
        function digest(): Buffer;
        function digest(encoding: nodecrypto.BinaryToTextEncoding): string;
        function digest(encoding?: nodecrypto.BinaryToTextEncoding) {
            const buf = Buffer.from(hash.arrayBuffer());
            if (encoding) return buf.toString(encoding);
            else return buf;
        }
        function update(data: nodecrypto.BinaryLike) {
            if (typeof data === 'string') hash._update(data);
            else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) hash._update(new Uint8Array(data));
            // Respect the view's offset/length instead of grabbing its whole backing buffer.
            else hash._update(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
            return hash as unknown as MD4HashImpl;
        }
        type MD4HashImpl = Omit<Md4, 'toString'> & { digest: typeof digest, update: typeof update };
        // @ts-expect-error patches to reuse the BaseHash methods
        hash.digest = digest; hash._update = hash.update; hash.update = update;
        super(hash as unknown as MD4HashImpl);
    }
    static override readonly byteLength = 16;
    override readonly byteLength = 16;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class MD5 extends BaseHash<MD5> {
    constructor() { super('md5'); }
    static override readonly byteLength = 16;
    override readonly byteLength = 16;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA224 extends BaseHash<SHA224> {
    constructor() { super('sha224'); }
    static override readonly byteLength = 28;
    override readonly byteLength = 28;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA512 extends BaseHash<SHA512> {
    constructor() { super('sha512'); }
    static override readonly byteLength = 64;
    override readonly byteLength = 64;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA384 extends BaseHash<SHA384> {
    constructor() { super('sha384'); }
    static override readonly byteLength = 48;
    override readonly byteLength = 48;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA256 extends BaseHash<SHA256> {
    constructor() { super('sha256'); }
    static override readonly byteLength = 32;
    override readonly byteLength = 32;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
export class SHA512_256 extends BaseHash<SHA512_256> {
    constructor() { super('sha512-256'); }
    static override readonly byteLength = 32;
    override readonly byteLength = 32;
    static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        const instance = new this(); instance.update(data);
        return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
    }
}
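
And a sketch of the TypedArray digest path (hypothetical; a `Uint32Array(8)` is 32 bytes, matching `SHA256.byteLength`):

    const into = new Uint32Array(8);
    SHA256.hash('hello', into);                        // fills `into` with the raw digest words
    const raw = new SHA256().update('hello').digest(); // no argument: a 32-byte Uint8Array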