mirror of https://github.com/oven-sh/bun
synced 2026-02-03 07:28:53 +00:00

Compare commits: 141 commits, jarred/git...dylan/dev-
Commits in this range (SHA1 only):

a0da7377f7, b31bee1e48, a4e9a31b94, 01c97bee80, 392acbee5a, 8b7888aeee, f24e8cb98a, 36bedb0bbc,
a63f09784e, 454316ffc3, d4a52f77c7, c2311ed06c, 05e8a6dd4d, 75902e6a21, aa06455987, 19a855e02b,
b1a0502c0c, 0399ae0ee9, dacb75dc1f, c370645afc, e1957228f3, 9feaab47f5, d306e65d0e, 7ba4b1d01e,
906b287e31, eabbd5cbfb, 068997b529, 0612dc7bd9, 8657d705b8, 2e59e845fa, 00df6cb4ee, 0d4089ea7c,
27c979129c, 8bb835bf63, 8bf50cf456, 01e2cb25e3, 010e715902, 8b321cc1c6, 0b9bab34d8, 61f0cc497b,
764e20ee19, 0276f5e4a3, 5a7b5ceb33, a04cf04cd5, 79284376ca, 452000a2ce, 172aecb02e, ea57037567,
80309e4d59, 48c5256196, e1ec32caea, 7f55b1af55, fbe405fb89, cd561c6bba, 1b5c6fcfb5, 74e65317f2,
72d43590a1, 9049b732db, 1e3d82441c, ca59ed04bd, fc7e2e912e, 16915504da, 6d03bdfc03, 034bcf2b57,
3223da2734, dd67cda545, a067619f13, c9242dae3a, 8d9b56260b, 964f2a8941, 694a820a34, 1d48f91b5e,
7839844abb, 9081073ec4, 386743b508, 1789f92991, c863341bf4, 03f5a385b2, 3c1a1b5634, c15190990c,
177239cff5, ea7068a531, 46cd5b10a1, b87cf4f247, d3bc5e391f, f9712ce309, 5e0caa0aa4, 4cf31f6a57,
3f257a2905, ba126fb330, 2072fa1d59, 61024b2b4a, 90e3d6c898, e8b652a5d9, 5131e66fa5, c019f86f14,
354391a263, 17120cefdc, be7db0d37a, 299c6c9b21, f1c2a611ad, 7d9dd67586, ccb0ed13c2, b58daf86da,
050a9cecb7, 0a3ac50931, fe0bb68d17, bc79a48ce4, 2081e5b656, e30d6d21f5, 81e1a9d54d, 1faeba01b9,
19540001d1, da0bc0b0d2, 95e12374ed, 4cc61a1b8c, 5416155449, c7b1e5c709, 444b9d1883, 197c7abe7d,
653c459660, 25dbe5cf3f, 2cbb196f29, 064d7bb56e, 37505ad955, c40468ea39, 29dd4166f2, 0b5363099b,
282dda62c8, fd91e3de0d, 633f4f593d, fd5e777639, 770c1c8327, 41d10ed01e, bb55b2596d, 197443b2db,
b62f70c23a, d4ccba67f2, 43777cffee, 3aedf0692c, 346e97dde2
@@ -128,11 +128,8 @@ const testPlatforms = [
  { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
  { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", tier: "oldest" },
  { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
  { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", tier: "oldest" },
  { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
  { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
.claude/commands/upgrade-nodejs.md (new file, 92 lines)
@@ -0,0 +1,92 @@

# Upgrading Bun's Self-Reported Node.js Version

This guide explains how to upgrade the Node.js version that Bun reports for compatibility with Node.js packages and native addons.

## Overview

Bun reports a Node.js version for compatibility with the Node.js ecosystem. This affects:

- `process.version` output
- Node-API (N-API) compatibility
- Native addon ABI compatibility
- V8 API compatibility for addons using V8 directly

## Files That Always Need Updates

### 1. Bootstrap Scripts

- `scripts/bootstrap.sh` - Update `NODEJS_VERSION=`
- `scripts/bootstrap.ps1` - Update `$NODEJS_VERSION =`

### 2. CMake Configuration

- `cmake/Options.cmake`
  - `NODEJS_VERSION` - The Node.js version string (e.g., "24.3.0")
  - `NODEJS_ABI_VERSION` - The ABI version number (find using the command below; a quick check follows this list)
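
A quick way to confirm both values landed is to grep the file (a sketch; the expected lines match the `optionx(...)` entries shown later in this diff):

```bash
# Should print the NODEJS_VERSION and NODEJS_ABI_VERSION defaults, e.g.:
#   optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "24.3.0")
#   optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "137")
grep -n "NODEJS_.*VERSION" cmake/Options.cmake
```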

### 3. Version Strings

- `src/bun.js/bindings/BunProcess.cpp`
  - Update `Bun__versions_node` with the Node.js version
  - Update `Bun__versions_v8` with the V8 version (find using command below)

### 4. N-API Version

- `src/napi/js_native_api.h`
  - Update `NAPI_VERSION` define (check Node.js release notes)

## Files That May Need Updates

Only check these if the build fails or tests crash after updating version numbers:

- V8 compatibility files in `src/bun.js/bindings/v8/` (if the V8 API changed)
- Test files (if Node.js requires a newer C++ standard)

## Quick Commands to Find Version Info

```bash
# Get latest Node.js version info
curl -s https://nodejs.org/dist/index.json | jq '.[0]'

# Get V8 version for a specific Node.js version (replace v24.3.0)
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep V8_VERSION

# Get ABI version for a specific Node.js version
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep NODE_MODULE_VERSION

# Or use the ABI registry
curl -s https://raw.githubusercontent.com/nodejs/node/main/doc/abi_version_registry.json | jq '.NODE_MODULE_VERSION."<version>"'
```

## Update Process

1. **Gather version info** using the commands above
2. **Update the required files** listed in the sections above
3. **Build and test**:

   ```bash
   bun bd
   bun bd -e "console.log(process.version)"
   bun bd -e "console.log(process.versions.v8)"
   bun bd test test/v8/v8.test.ts
   bun bd test test/napi/napi.test.ts
   ```

4. **Check for V8 API changes** only if the build fails or tests crash:
   - Compare v8-function-callback.h between versions
   - Check v8-internal.h for Isolate size changes
   - Look for new required APIs in build errors

## If Build Fails or Tests Crash

The V8 API rarely has breaking changes between minor Node.js versions. If you encounter issues:

1. Check build errors for missing symbols or type mismatches
2. Compare V8 headers between the old and new Node.js versions
3. Most issues can be resolved by implementing missing functions or adjusting structures

## Testing Checklist

- [ ] `process.version` returns the correct version
- [ ] `process.versions.v8` returns the correct V8 version
- [ ] `process.config.variables.node_module_version` returns the correct ABI
- [ ] V8 tests pass
- [ ] N-API tests pass
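
The first three items can be checked from the command line, reusing the `bun bd -e` pattern from the update process above:

```bash
bun bd -e "console.log(process.version)"                              # expect the new Node.js version
bun bd -e "console.log(process.versions.v8)"                          # expect the matching V8 version
bun bd -e "console.log(process.config.variables.node_module_version)" # expect the new ABI number
```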

## Notes

- Most upgrades only require updating version numbers
- Major V8 version changes (rare) may require API updates
- The V8 shim implements only the APIs used by common native addons
.claude/commands/upgrade-webkit.md (new file, 23 lines)
@@ -0,0 +1,23 @@

Upgrade Bun's WebKit fork to the latest upstream version of WebKit.

To do that:

- cd vendor/WebKit
- git fetch upstream
- git merge upstream/main
- Fix the merge conflicts
- cd ../../ (back to bun)
- make jsc-build (this will take about 7 minutes)
- While it compiles, in another task, review the JSC commits between the last version of WebKit and the new version. Write up a summary of the WebKit changes in a file called "webkit-changes.md"
- bun run build:local (build Bun with the new WebKit and make sure it compiles)
- After making sure it compiles, run some code to make sure things work. Something like ./build/debug-local/bun-debug --print '42' should be all you need
- cd vendor/WebKit
- git commit -am "Upgrade WebKit to the latest version"
- git push
- Get the commit SHA of your new commit in the vendor/WebKit directory
- cd ../../ (back to bun)
- Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to the commit SHA of your new commit
- git checkout -b bun/webkit-upgrade-<commit-sha>
- Commit + push (without adding the webkit-changes.md file)
- Create a PR titled "Upgrade WebKit to <commit-sha>", and paste your webkit-changes.md into the PR description
- Delete the webkit-changes.md file (see the consolidated sketch below)
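
A minimal shell sketch of the flow above (assumes the `upstream` remote is already configured; conflict resolution, the SetupWebKit.cmake edit, and the PR description remain manual steps):

```bash
cd vendor/WebKit
git fetch upstream
git merge upstream/main            # fix any merge conflicts before continuing
cd ../../                          # back to bun
make jsc-build                     # takes about 7 minutes
bun run build:local
./build/debug-local/bun-debug --print '42'   # smoke test

cd vendor/WebKit
git commit -am "Upgrade WebKit to the latest version"
git push
sha=$(git rev-parse HEAD)          # commit SHA for cmake/tools/SetupWebKit.cmake
cd ../../
# update WEBKIT_VERSION to $sha, then:
git checkout -b "bun/webkit-upgrade-$sha"
```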

.cursor/environment.json (new file, 10 lines)
@@ -0,0 +1,10 @@

{
  "snapshot": "snapshot-20250706-71021aff-cc0d-4a7f-a468-d443b16c4bf1",
  "install": "bun install",
  "terminals": [
    {
      "name": "bun build",
      "command": "bun run build"
    }
  ]
}
@@ -1,13 +1,41 @@
---
description: How to build Bun
globs:
description:
globs: src/**/*.cpp,src/**/*.zig
alwaysApply: false
---

# How to build Bun

### Build Commands

Run:

- **Build debug version**: `bun bd` or `bun run build:debug`
  - Creates a debug build at `./build/debug/bun-debug`
  - Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
  - **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`

```bash
bun bd
```

### Run a file

To run a file, use:

```sh
bun bd <file> <...args>
```

**CRITICAL**: Never use `bun <file>` directly. It will not have your changes.

### Logging

`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope.

Debug logs look like this:

```zig
const log = bun.Output.scoped(.${SCOPE}, false);

// ...later
log("MY DEBUG LOG", .{})
```
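
For example, to see only one scope's logs when running the debug build (a sketch; `fetch` is an assumed scope name, substitute whatever the `Output.scoped(...)` call declares):

```bash
# Enable a single debug-log scope for one run
BUN_DEBUG_fetch=1 bun bd ./foo.ts

# Silence every scope that isn't explicitly enabled
BUN_DEBUG_QUIET_LOGS=1 bun bd ./foo.ts
```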

### Code Generation

Code generation happens automatically as part of the build process. There are no commands to run.
@@ -1,8 +1,9 @@
---
description: How Zig works with JavaScriptCore bindings generator
globs:
alwaysApply: false
---

# Bun's JavaScriptCore Class Bindings Generator

This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).

@@ -24,7 +25,7 @@ The `.classes.ts` files define the JavaScript API using a declarative approach:

```typescript
// Example: encoding.classes.ts
define({
  name: "TextDecoder",
  constructor: true,
  JSType: "object",
  finalize: true,
@@ -40,17 +41,18 @@ define({
    },
    fatal: {
      // Read-only property
      getter: true,
    },
    ignoreBOM: {
      // Read-only property
      getter: true,
    },
  },
});
```

Each class definition specifies:

- The class name
- Whether it has a constructor
- JavaScript type (object, function, etc.)

@@ -87,7 +89,7 @@ pub const TextDecoder = struct {
    // Fields
  });
}

// Prototype methods - note return type includes JSError
pub fn decode(
  this: *TextDecoder,
@@ -96,23 +98,23 @@ pub const TextDecoder = struct {
) bun.JSError!JSC.JSValue {
  // Implementation
}

// Getters
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
  return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
}

pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
  return JSC.JSValue.jsBoolean(this.fatal);
}

// Cleanup - note standard pattern of using deinit/deref
fn deinit(this: *TextDecoder) void {
  // Release any retained resources
  // Free the pointer at the end.
  bun.destroy(this);
}

// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
pub fn finalize(this: *TextDecoder) void {
  this.deinit();
@@ -121,6 +123,7 @@ pub const TextDecoder = struct {

Key components in the Zig file:

- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
@@ -128,6 +131,7 @@ Key components in the Zig file:
- Methods matching the JavaScript interface
- Getters/setters for properties
- Proper resource cleanup pattern with `deinit()` and `finalize()`
- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class

## Code Generation System

@@ -140,6 +144,7 @@ The binding generator produces C++ code that connects JavaScript and Zig:
5. **Property Caching**: Implements the caching system for properties

The generated C++ code includes:

- A JSC wrapper class (`JSTextDecoder`)
- A prototype class (`JSTextDecoderPrototype`)
- A constructor function (`JSTextDecoderConstructor`)

@@ -152,28 +157,29 @@ The `CallFrame` object provides access to JavaScript execution context:

```zig
pub fn decode(
  this: *TextDecoder,
  globalObject: *JSGlobalObject,
  callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
  // Get arguments
  const input = callFrame.argument(0);
  const options = callFrame.argument(1);

  // Get this value
  const thisValue = callFrame.thisValue();

  // Implementation with error handling
  if (input.isUndefinedOrNull()) {
    return globalObject.throw("Input cannot be null or undefined", .{});
  }

  // Return value or throw error
  return JSC.JSValue.jsString(globalObject, "result");
}
```

CallFrame methods include:

- `argument(i)`: Get the i-th argument
- `argumentCount()`: Get the number of arguments
- `thisValue()`: Get the `this` value

@@ -201,17 +207,17 @@ JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
  auto throwScope = DECLARE_THROW_SCOPE(vm);
  JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
  JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);

  // Check for cached value and return if present
  if (JSValue cachedValue = thisObject->m_encoding.get())
    return JSValue::encode(cachedValue);

  // Get value from Zig implementation
  JSC::JSValue result = JSC::JSValue::decode(
    TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
  );
  RETURN_IF_EXCEPTION(throwScope, {});

  // Store in cache for future access
  thisObject->m_encoding.set(vm, thisObject, result);
  RELEASE_AND_RETURN(throwScope, JSValue::encode(result));

@@ -253,7 +259,7 @@ This system provides several key benefits:
1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization

### Use Cases

@@ -281,7 +287,7 @@ Bun uses a consistent pattern for resource cleanup:
pub fn deinit(this: *TextDecoder) void {
  // Release resources like strings
  this._encoding.deref(); // String deref pattern

  // Free any buffers
  if (this.buffer) |buffer| {
    bun.default_allocator.free(buffer);

@@ -312,7 +318,7 @@ Bun uses `bun.JSError!JSValue` return type for proper error handling:

```zig
pub fn decode(
  this: *TextDecoder,
  globalObject: *JSGlobalObject,
  callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
@@ -320,13 +326,14 @@ pub fn decode(
  if (callFrame.argumentCount() < 1) {
    return globalObject.throw("Missing required argument", .{});
  }

  // Or returning a success value
  return JSC.JSValue.jsString(globalObject, "Success!");
}
```

This pattern allows Zig functions to:

1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack

@@ -339,7 +346,7 @@ The binding system includes robust error handling:
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
  scope.throwException(lexicalGlobalObject,
    Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
  return {};
}

@@ -351,7 +358,7 @@ The binding system creates proper JavaScript prototype chains:

1. **Constructor**: JSTextDecoderConstructor with standard .prototype property
2. **Prototype**: JSTextDecoderPrototype with methods and properties
3. **Instances**: Each JSTextDecoder instance with `__proto__` pointing to prototype

This ensures JavaScript inheritance works as expected:

@@ -360,7 +367,7 @@ This ensures JavaScript inheritance works as expected:
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
  Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);

  // Set up the prototype chain
  putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
  ASSERT(inherits(info()));

@@ -372,7 +379,7 @@ void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globa
The binding system is optimized for performance:

1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions

@@ -381,6 +388,7 @@ The binding system is optimized for performance:
To create a new class binding in Bun:

1. **Define the class interface** in a `.classes.ts` file:

   ```typescript
   define({
     name: "MyClass",
@@ -393,12 +401,13 @@ To create a new class binding in Bun:
       myProperty: {
         getter: true,
         cache: true,
       },
     },
   });
   ```

2. **Implement the native functionality** in a `.zig` file:

   ```zig
   pub const MyClass = struct {
     // Generated bindings
@@ -409,9 +418,9 @@ To create a new class binding in Bun:

     // State
     value: []const u8,

     pub const new = bun.TrivialNew(@This());

     // Constructor
     pub fn constructor(
       globalObject: *JSGlobalObject,
@@ -420,7 +429,7 @@ To create a new class binding in Bun:
       const arg = callFrame.argument(0);
       // Implementation
     }

     // Method
     pub fn myMethod(
       this: *MyClass,
@@ -429,17 +438,17 @@ To create a new class binding in Bun:
     ) bun.JSError!JSC.JSValue {
       // Implementation
     }

     // Getter
     pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
       return JSC.JSValue.jsString(globalObject, this.value);
     }

     // Resource cleanup
     pub fn deinit(this: *MyClass) void {
       // Clean up resources
     }

     pub fn finalize(this: *MyClass) void {
       this.deinit();
       bun.destroy(this);
   ```

@@ -474,11 +483,13 @@ For each Zig class, the system generates:
### 3. Zig Bindings

- **External Function Declarations**:

  ```zig
  extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
  ```

- **Cached Value Accessors**:

  ```zig
  pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
  pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
  ```
.github/CODEOWNERS (vendored, 13 lines)
@@ -1,18 +1,5 @@

# Project
/.github/CODEOWNERS @Jarred-Sumner

# Build system
/CMakeLists.txt @Electroid
/cmake/*.cmake @Electroid
/scripts/ @Electroid

# CI
/.buildkite/ @Electroid
/.github/workflows/ @Electroid

# Debugger protocol
/packages/bun-inspector-protocol/ @Electroid
/packages/bun-debug-adapter-protocol/ @Electroid

# Tests
/test/expectations.txt @Jarred-Sumner

.github/workflows/format.yml (vendored, 2 lines)
@@ -46,6 +46,8 @@ jobs:
      run: |
        bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
        zig fmt src
        bun scripts/sortImports src
        zig fmt src
    - name: Commit
      uses: stefanzweifel/git-auto-commit-action@v5
      with:
.vscode/launch.json (generated, vendored, 1005 lines)
File diff suppressed because it is too large
AGENTS.md (deleted, 36 lines)
@@ -1,36 +0,0 @@

## bun tests

**IMPORTANT**: use the `bun bd` command instead of the `bun` command. For example:

✅ Good

```sh
bun bd test internal/ban-words.test.ts
bun bd ./foo.ts
```

The `bun bd` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected.

### Run a file

To run a file, you can use the `bun bd <file-path>` command.

```sh
bun bd ./foo.ts
```

### Run tests

To run a single test, you need to use the `bun bd test <test-name>` command.

```sh
bun bd test internal/ban-words.test.ts
```

You must ALWAYS make sure to pass a file path to the `bun bd test <file-path>` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory.

### Run a Node.js test

```sh
bun bd --silent node:test test-fs-link
```
CLAUDE.md (new file, 245 lines)
@@ -0,0 +1,245 @@

This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed for speed, with a bundler, test runner, and Node.js-compatible package manager. It's written primarily in Zig, with C++ for the JavaScriptCore integration, and is powered by WebKit's JavaScriptCore engine.

## Building and Running Bun

### Build Commands

- **Build debug version**: `bun bd` or `bun run build:debug`
  - Creates a debug build at `./build/debug/bun-debug`
  - Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
  - **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`

### Other Build Variants

- `bun run build:release` - Release build

Address sanitizer is enabled by default in debug builds of Bun.

## Testing

### Running Tests

- **Single test file**: `bun bd test test/js/bun/http/serve.test.ts`
- **Fuzzy match test file**: `bun bd test http/serve.test.ts`
- **With filter**: `bun bd test test/js/bun/http/serve.test.ts -t "should handle"`

### Test Organization

- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
- `test/js/node/` - Node.js compatibility tests
- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
- `test/cli/` - CLI command tests (install, run, test, etc.)
- `test/regression/issue/` - Regression tests (create one per bug fix)
- `test/bundler/` - Bundler and transpiler tests
- `test/integration/` - End-to-end integration tests
- `test/napi/` - N-API compatibility tests
- `test/v8/` - V8 C++ API compatibility tests

### Writing Tests

Tests use Bun's Jest-compatible test runner with proper test fixtures:

```typescript
import { test, expect } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

test("my feature", async () => {
  // Create temp directory with test files
  const dir = tempDirWithFiles("test-prefix", {
    "index.js": `console.log("hello");`,
  });

  // Spawn Bun process
  await using proc = Bun.spawn({
    cmd: [bunExe(), "index.js"],
    env: bunEnv,
    cwd: dir,
  });

  const [stdout, stderr, exitCode] = await Promise.all([
    new Response(proc.stdout).text(),
    new Response(proc.stderr).text(),
    proc.exited,
  ]);

  expect(exitCode).toBe(0);
  expect(stdout).toBe("hello\n");
});
```

## Code Architecture

### Language Structure

- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources

### Core Source Organization

#### Runtime Core (`src/`)

- `bun.zig` - Main entry point
- `cli.zig` - CLI command orchestration
- `js_parser.zig`, `js_lexer.zig`, `js_printer.zig` - JavaScript parsing/printing
- `transpiler.zig` - Wrapper around js_parser with sourcemap support
- `resolver/` - Module resolution system
- `allocators/` - Custom memory allocators for performance

#### JavaScript Runtime (`src/bun.js/`)

- `bindings/` - C++ JavaScriptCore bindings
  - Generated classes from `.classes.ts` files
  - Manual bindings for complex APIs
- `api/` - Bun-specific APIs
  - `server.zig` - HTTP server implementation
  - `FFI.zig` - Foreign Function Interface
  - `crypto.zig` - Cryptographic operations
  - `glob.zig` - File pattern matching
- `node/` - Node.js compatibility layer
  - Module implementations (fs, path, crypto, etc.)
  - Process and Buffer APIs
- `webcore/` - Web API implementations
  - `fetch.zig` - Fetch API
  - `streams.zig` - Web Streams
  - `Blob.zig`, `Response.zig`, `Request.zig`
- `event_loop/` - Event loop and task management

#### Build Tools & Package Manager

- `src/bundler/` - JavaScript bundler
  - Advanced tree-shaking
  - CSS processing
  - HTML handling
- `src/install/` - Package manager
  - `lockfile/` - Lockfile handling
  - `npm.zig` - npm registry client
  - `lifecycle_script_runner.zig` - Package scripts

#### Other Key Components

- `src/shell/` - Cross-platform shell implementation
- `src/css/` - CSS parser and processor
- `src/http/` - HTTP client implementation
  - `websocket_client/` - WebSocket client (including deflate support)
- `src/sql/` - SQL database integrations
- `src/bake/` - Server-side rendering framework

### JavaScript Class Implementation (C++)

When implementing JavaScript classes in C++:

1. Create three classes if there's a public constructor:

   - `class Foo : public JSC::JSDestructibleObject` (if it has C++ fields)
   - `class FooPrototype : public JSC::JSNonFinalObject`
   - `class FooConstructor : public JSC::InternalFunction`

2. Define properties using HashTableValue arrays
3. Add iso subspaces for classes with C++ fields
4. Cache structures in ZigGlobalObject (see the search below for existing examples)
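
Existing bindings are the best reference for this pattern; a simple search turns them up (a sketch, using one of the base classes named above):

```bash
# List C++ files that already define a ...Prototype : JSC::JSNonFinalObject class
grep -rln "JSC::JSNonFinalObject" src/bun.js/bindings/ | head
```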

## Development Workflow

### Code Formatting

- `bun run prettier` - Format JS/TS files
- `bun run zig-format` - Format Zig files
- `bun run clang-format` - Format C++ files

### Watching for Changes

- `bun run watch` - Incremental Zig compilation with error checking
- `bun run watch-windows` - Windows-specific watch mode

### Code Generation

Code generation happens automatically as part of the build process. The main scripts are:

- `src/codegen/generate-classes.ts` - Generates Zig & C++ bindings from `*.classes.ts` files
- `src/codegen/generate-jssink.ts` - Generates stream-related classes
- `src/codegen/bundle-modules.ts` - Bundles built-in modules like `node:fs`
- `src/codegen/bundle-functions.ts` - Bundles global functions like `ReadableStream`

In development, bundled modules can be reloaded without rebuilding Zig by running `bun run build`.

## JavaScript Modules (`src/js/`)

Built-in JavaScript modules use special syntax and are organized as:

- `node/` - Node.js compatibility modules (`node:fs`, `node:path`, etc.)
- `bun/` - Bun-specific modules (`bun:ffi`, `bun:sqlite`, etc.)
- `thirdparty/` - NPM modules we replace (like `ws`)
- `internal/` - Internal modules not exposed to users
- `builtins/` - Core JavaScript builtins (streams, console, etc.)

### Special Syntax in Built-in Modules

1. **`$` prefix** - Access to private properties and JSC intrinsics:

   ```js
   const arr = $Array.from(...); // Private global
   map.$set(...); // Private method
   const arr2 = $newArrayWithSize(5); // JSC intrinsic
   ```

2. **`require()`** - Must use string literals, resolved at compile time:

   ```js
   const fs = require("fs"); // Directly loads by numeric ID
   ```

3. **Debug helpers**:

   - `$debug()` - Like console.log but stripped in release builds
   - `$assert()` - Assertions stripped in release builds
   - `if ($debug) {}` - Check if the debug env var is set

4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated

5. **Export syntax**: Use `export default`, which gets converted to a return statement:

   ```js
   export default {
     readFile,
     writeFile,
   };
   ```

Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.

## CI

Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:

```bash
bun ci
```

## Important Development Notes

1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging

## Key APIs and Features

### Bun-Specific APIs

- **Bun.serve()** - High-performance HTTP server
- **Bun.spawn()** - Process spawning with better performance than Node.js
- **Bun.file()** - Fast file I/O operations
- **Bun.write()** - Unified API for writing to files, stdout, etc.
- **Bun.$ (Shell)** - Cross-platform shell scripting
- **Bun.SQLite** - Native SQLite integration
- **Bun.FFI** - Call native libraries from JavaScript
- **Bun.Glob** - Fast file pattern matching
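
Several of these can be exercised directly from the debug build with one-liners (a sketch, reusing the `bun bd -e` pattern; the file and glob pattern are illustrative):

```bash
# Bun.file(): read a file through the debug build
bun bd -e 'console.log(await Bun.file("package.json").text())'

# Bun.Glob: list matching files from the current directory
bun bd -e 'for await (const f of new Bun.Glob("src/*.zig").scan()) console.log(f)'
```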

@@ -144,6 +144,14 @@ $ bun bd test foo.test.ts
$ bun bd ./foo.ts
```

Bun generally takes about 2.5 minutes to compile a debug build when there are Zig changes. If your development workflow is "change one line, save, rebuild", you will spend too much time waiting for the build to finish. Instead:

- Batch up your changes
- Ensure zls is running with incremental watching for LSP errors (if you use VSCode, install Zig, and run `bun run build` once to download Zig, this should just work)
- Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
- Use debug logs. `BUN_DEBUG_<scope>=1` enables debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, set `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds. (See the example after this list.)
- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).
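
For example (a sketch; the scope name and log path are illustrative):

```bash
# Enable one scope and also dump all enabled debug logs to a file
BUN_DEBUG_fetch=1 BUN_DEBUG=/tmp/bun-debug.log bun bd ./foo.ts
```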

## Code generation scripts

Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.

@@ -179,6 +187,7 @@ To run a release build from a pull request, you can use the `bun-pr` npm package
bunx bun-pr <pr-number>
bunx bun-pr <branch-name>
bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
bunx bun-pr --asan <pr-number> # Linux x64 only
```

This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`.

@@ -189,24 +198,18 @@ bun-1234566 --version

This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub.

## Valgrind
## AddressSanitizer

On Linux, valgrind can help find memory issues.
[AddressSanitizer](https://en.wikipedia.org/wiki/AddressSanitizer) helps find memory issues, and is enabled by default in debug builds of Bun on Linux and macOS. This includes the Zig code and all dependencies. It makes the Zig code take about 2x longer to build; if that's stopping you from being productive, you can disable it by changing `-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>` to `-Denable_asan=false` in the `cmake/targets/BuildBun.cmake` file, but generally we recommend batching your changes up between builds.

Keep in mind:

- JavaScriptCore doesn't support valgrind. It will report spurious errors.
- Valgrind is slow
- Mimalloc will sometimes cause spurious errors when the debug build is enabled

You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You may need to manually compile Valgrind instead of using it from your Linux package manager.

`--fair-sched=try` is necessary if running multithreaded code in Bun (such as the bundler). Otherwise it will hang.
To build a release build with Address Sanitizer, run:

```bash
$ valgrind --fair-sched=try --track-origins=yes bun-debug <args>
$ bun run build:release:asan
```

In CI, we run our test suite with at least one target that is built with Address Sanitizer.

## Building WebKit locally + Debug mode of JSC

WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:
bench/snippets/source-map.js (new file, 28 lines)
@@ -0,0 +1,28 @@

import { SourceMap } from "node:module";
import { readFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";

const json = JSON.parse(readFileSync(process.argv.at(-1), "utf-8"));

bench("new SourceMap(json)", () => {
  return new SourceMap(json);
});

const map = new SourceMap(json);

const toRotate = [];
for (let j = 0; j < 10000; j++) {
  if (map.findEntry(0, j).generatedColumn) {
    toRotate.push(j);
    if (toRotate.length > 5) break;
  }
}
let i = 0;
bench("findEntry (match)", () => {
  return map.findEntry(0, toRotate[i++ % 3]).generatedColumn;
});

bench("findEntry (no match)", () => {
  return map.findEntry(0, 9999).generatedColumn;
});

await run();
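
The snippet reads the source map path from the last CLI argument, so a run looks something like this (a sketch; `./app.js.map` is a placeholder for any `.map` file):

```bash
bun bd bench/snippets/source-map.js ./app.js.map
```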

build.zig (20 lines)
@@ -390,6 +390,12 @@ pub fn build(b: *Build) !void {
        .{ .os = .windows, .arch = .x86_64 },
    }, &.{ .Debug, .ReleaseFast });
}
{
    const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows");
    addMultiCheck(b, step, build_options, &.{
        .{ .os = .windows, .arch = .x86_64 },
    }, &.{.Debug});
}
{
    const step = b.step("check-macos", "Check for semantic analysis errors on Windows");
    addMultiCheck(b, step, build_options, &.{
@@ -397,6 +403,13 @@ pub fn build(b: *Build) !void {
        .{ .os = .mac, .arch = .aarch64 },
    }, &.{ .Debug, .ReleaseFast });
}
{
    const step = b.step("check-macos-debug", "Check for semantic analysis errors on Windows");
    addMultiCheck(b, step, build_options, &.{
        .{ .os = .mac, .arch = .x86_64 },
        .{ .os = .mac, .arch = .aarch64 },
    }, &.{.Debug});
}
{
    const step = b.step("check-linux", "Check for semantic analysis errors on Windows");
    addMultiCheck(b, step, build_options, &.{
@@ -404,6 +417,13 @@ pub fn build(b: *Build) !void {
        .{ .os = .linux, .arch = .aarch64 },
    }, &.{ .Debug, .ReleaseFast });
}
{
    const step = b.step("check-linux-debug", "Check for semantic analysis errors on Windows");
    addMultiCheck(b, step, build_options, &.{
        .{ .os = .linux, .arch = .x86_64 },
        .{ .os = .linux, .arch = .aarch64 },
    }, &.{.Debug});
}

// zig build translate-c-headers
{
bun.lock (11 lines)
@@ -4,7 +4,6 @@
    "": {
      "name": "bun",
      "devDependencies": {
        "@types/react": "^18.3.3",
        "esbuild": "^0.21.4",
        "mitata": "^0.1.11",
        "peechy": "0.4.34",
@@ -29,13 +28,17 @@
        "@types/node": "*",
      },
      "devDependencies": {
        "@types/react": "^19",
        "typescript": "^5.0.2",
      },
      "peerDependencies": {
        "@types/react": "^19",
      },
    },
  },
  "overrides": {
    "bun-types": "workspace:packages/bun-types",
    "@types/bun": "workspace:packages/@types/bun",
  },
  "packages": {
    "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
@@ -88,9 +91,7 @@

    "@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],

    "@types/prop-types": ["@types/prop-types@15.7.14", "", {}, "sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ=="],

    "@types/react": ["@types/react@18.3.21", "", { "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" } }, "sha512-gXLBtmlcRJeT09/sI4PxVwyrku6SaNUj/6cMubjE6T6XdY1fDmBL7r0nX0jbSZPU/Xr0KuwLLZh6aOYY5d91Xw=="],
    "@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],

    "bun-types": ["bun-types@workspace:packages/bun-types"],
@@ -139,10 +139,10 @@ endif()
optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION})

# Used in process.version, process.versions.node, napi, and elsewhere
optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "22.6.0")
optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "24.3.0")

# Used in process.versions.modules and compared while loading V8 modules
optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "127")
optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "137")

if(APPLE)
  set(DEFAULT_STATIC_SQLITE OFF)
cmake/scripts/PrepareNodeHeaders.cmake (new file, 31 lines)
@@ -0,0 +1,31 @@

# This script prepares Node.js headers for use with Bun
# It removes conflicting OpenSSL and libuv headers since Bun uses BoringSSL and its own libuv

if(NOT DEFINED NODE_INCLUDE_DIR)
  message(FATAL_ERROR "NODE_INCLUDE_DIR not defined")
endif()

if(NOT EXISTS "${NODE_INCLUDE_DIR}/node")
  message(FATAL_ERROR "Node headers not found at ${NODE_INCLUDE_DIR}/node")
endif()

# Remove OpenSSL headers that conflict with BoringSSL
if(EXISTS "${NODE_INCLUDE_DIR}/node/openssl")
  file(REMOVE_RECURSE "${NODE_INCLUDE_DIR}/node/openssl")
  message(STATUS "Removed conflicting OpenSSL headers")
endif()

# Remove libuv headers that might conflict
if(EXISTS "${NODE_INCLUDE_DIR}/node/uv")
  file(REMOVE_RECURSE "${NODE_INCLUDE_DIR}/node/uv")
  message(STATUS "Removed conflicting libuv headers")
endif()

if(EXISTS "${NODE_INCLUDE_DIR}/node/uv.h")
  file(REMOVE "${NODE_INCLUDE_DIR}/node/uv.h")
  message(STATUS "Removed conflicting uv.h header")
endif()

# Add the node directory to include path for cppgc
# This is needed because cppgc internal headers use relative includes
file(WRITE "${NODE_INCLUDE_DIR}/.node-headers-prepared" "1")
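
When debugging header conflicts, the same preparation can be done by hand (a sketch; `$NODE_INCLUDE_DIR` stands for the extracted headers directory):

```bash
# Manual equivalent of PrepareNodeHeaders.cmake
rm -rf "$NODE_INCLUDE_DIR/node/openssl"   # conflicts with BoringSSL
rm -rf "$NODE_INCLUDE_DIR/node/uv"        # conflicts with Bun's own libuv
rm -f  "$NODE_INCLUDE_DIR/node/uv.h"
echo 1 > "$NODE_INCLUDE_DIR/.node-headers-prepared"
```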

@@ -28,6 +28,7 @@ src/bun.js/bindings/BunWorkerGlobalScope.cpp
src/bun.js/bindings/c-bindings.cpp
src/bun.js/bindings/CallSite.cpp
src/bun.js/bindings/CallSitePrototype.cpp
src/bun.js/bindings/CatchScopeBinding.cpp
src/bun.js/bindings/CodeCoverage.cpp
src/bun.js/bindings/ConsoleObject.cpp
src/bun.js/bindings/Cookie.cpp
@@ -99,6 +100,9 @@ src/bun.js/bindings/napi_finalizer.cpp
src/bun.js/bindings/napi_handle_scope.cpp
src/bun.js/bindings/napi_type_tag.cpp
src/bun.js/bindings/napi.cpp
src/bun.js/bindings/NapiClass.cpp
src/bun.js/bindings/NapiRef.cpp
src/bun.js/bindings/NapiWeakValue.cpp
src/bun.js/bindings/ncrpyto_engine.cpp
src/bun.js/bindings/ncrypto.cpp
src/bun.js/bindings/node/crypto/CryptoDhJob.cpp

@@ -7,12 +7,14 @@ src/bun.js/api/h2.classes.ts
src/bun.js/api/html_rewriter.classes.ts
src/bun.js/api/JSBundler.classes.ts
src/bun.js/api/postgres.classes.ts
src/bun.js/api/ResumableSink.classes.ts
src/bun.js/api/S3Client.classes.ts
src/bun.js/api/S3Stat.classes.ts
src/bun.js/api/server.classes.ts
src/bun.js/api/Shell.classes.ts
src/bun.js/api/ShellArgs.classes.ts
src/bun.js/api/sockets.classes.ts
src/bun.js/api/sourcemap.classes.ts
src/bun.js/api/streams.classes.ts
src/bun.js/api/valkey.classes.ts
src/bun.js/api/zlib.classes.ts

@@ -10,7 +10,26 @@ src/allocators/NullableAllocator.zig
src/analytics/analytics_schema.zig
src/analytics/analytics_thread.zig
src/api/schema.zig
src/ast/Ast.zig
src/ast/ASTMemoryAllocator.zig
src/ast/B.zig
src/ast/base.zig
src/ast/Binding.zig
src/ast/BundledAst.zig
src/ast/CharFreq.zig
src/ast/E.zig
src/ast/Expr.zig
src/ast/G.zig
src/ast/Macro.zig
src/ast/NewStore.zig
src/ast/Op.zig
src/ast/S.zig
src/ast/Scope.zig
src/ast/ServerComponentBoundary.zig
src/ast/Stmt.zig
src/ast/Symbol.zig
src/ast/TS.zig
src/ast/UseDirective.zig
src/async/posix_event_loop.zig
src/async/stub_event_loop.zig
src/async/windows_event_loop.zig
@@ -85,6 +104,7 @@ src/bun.js/bindings/AnyPromise.zig
src/bun.js/bindings/bun-simdutf.zig
src/bun.js/bindings/CachedBytecode.zig
src/bun.js/bindings/CallFrame.zig
src/bun.js/bindings/CatchScope.zig
src/bun.js/bindings/codegen.zig
src/bun.js/bindings/CommonAbortReason.zig
src/bun.js/bindings/CommonStrings.zig
@@ -251,6 +271,7 @@ src/bun.js/webcore/prompt.zig
src/bun.js/webcore/ReadableStream.zig
src/bun.js/webcore/Request.zig
src/bun.js/webcore/Response.zig
src/bun.js/webcore/ResumableSink.zig
src/bun.js/webcore/S3Client.zig
src/bun.js/webcore/S3File.zig
src/bun.js/webcore/S3Stat.zig
@@ -323,6 +344,7 @@ src/cli/package_manager_command.zig
src/cli/patch_command.zig
src/cli/patch_commit_command.zig
src/cli/pm_trusted_command.zig
src/cli/pm_version_command.zig
src/cli/pm_view_command.zig
src/cli/publish_command.zig
src/cli/remove_command.zig
@@ -502,12 +524,29 @@ src/hive_array.zig
src/hmac.zig
src/HTMLScanner.zig
src/http.zig
src/http/header_builder.zig
src/http/method.zig
src/http/mime_type.zig
src/http/url_path.zig
src/http/AsyncHTTP.zig
src/http/CertificateInfo.zig
src/http/Decompressor.zig
src/http/Encoding.zig
src/http/FetchRedirect.zig
src/http/HeaderBuilder.zig
src/http/Headers.zig
src/http/HTTPCertError.zig
src/http/HTTPContext.zig
src/http/HTTPRequestBody.zig
src/http/HTTPThread.zig
src/http/InitError.zig
src/http/InternalState.zig
src/http/Method.zig
src/http/MimeType.zig
src/http/ProxyTunnel.zig
src/http/SendFile.zig
src/http/Signals.zig
src/http/ThreadSafeStreamBuffer.zig
src/http/URLPath.zig
src/http/websocket_client.zig
src/http/websocket_client/CppWebSocket.zig
src/http/websocket_client/WebSocketDeflate.zig
src/http/websocket_client/WebSocketUpgradeClient.zig
src/http/websocket_http_client.zig
src/http/websocket.zig
@@ -517,11 +556,17 @@ src/import_record.zig
src/ini.zig
src/install/bin.zig
src/install/dependency.zig
src/install/ExternalSlice.zig
src/install/extract_tarball.zig
src/install/hoisted_install.zig
src/install/install_binding.zig
src/install/install.zig
src/install/integrity.zig
src/install/isolated_install.zig
src/install/isolated_install/Hardlinker.zig
src/install/isolated_install/Installer.zig
src/install/isolated_install/Store.zig
src/install/isolated_install/Symlinker.zig
src/install/lifecycle_script_runner.zig
src/install/lockfile.zig
src/install/lockfile/Buffers.zig
@@ -538,12 +583,28 @@ src/install/lockfile/printer/tree_printer.zig
src/install/lockfile/printer/Yarn.zig
src/install/lockfile/Tree.zig
src/install/migration.zig
src/install/NetworkTask.zig
src/install/npm.zig
src/install/PackageInstall.zig
src/install/PackageInstaller.zig
src/install/PackageManager.zig
src/install/PackageManager/CommandLineArguments.zig
src/install/PackageManager/install_with_manager.zig
src/install/PackageManager/PackageJSONEditor.zig
src/install/PackageManager/PackageManagerDirectories.zig
src/install/PackageManager/PackageManagerEnqueue.zig
src/install/PackageManager/PackageManagerLifecycle.zig
src/install/PackageManager/PackageManagerOptions.zig
src/install/PackageManager/PackageManagerResolution.zig
src/install/PackageManager/patchPackage.zig
src/install/PackageManager/processDependencyList.zig
src/install/PackageManager/ProgressStrings.zig
src/install/PackageManager/runTasks.zig
src/install/PackageManager/updatePackageJSONAndInstall.zig
src/install/PackageManager/UpdateRequest.zig
src/install/PackageManager/WorkspacePackageJSONCache.zig
src/install/PackageManagerTask.zig
src/install/PackageManifestMap.zig
src/install/padding_checker.zig
src/install/patch_install.zig
src/install/repository.zig
@@ -555,6 +616,7 @@ src/install/windows-shim/bun_shim_impl.zig
src/io/heap.zig
src/io/io.zig
src/io/MaxBuf.zig
src/io/openForWriting.zig
src/io/PipeReader.zig
src/io/pipes.zig
src/io/PipeWriter.zig
@@ -587,6 +649,10 @@ src/options.zig
src/output.zig
src/OutputFile.zig
src/patch.zig
src/paths.zig
src/paths/EnvPath.zig
src/paths/path_buffer_pool.zig
src/paths/Path.zig
src/perf.zig
src/pool.zig
src/Progress.zig
@@ -625,6 +691,7 @@ src/semver/SemverString.zig
src/semver/SlicedString.zig
src/semver/Version.zig
src/sha.zig
src/shell/AllocScope.zig
src/shell/braces.zig
src/shell/Builtin.zig
src/shell/builtin/basename.zig
@@ -669,14 +736,83 @@ src/shell/states/Stmt.zig
src/shell/states/Subshell.zig
src/shell/subproc.zig
src/shell/util.zig
src/shell/Yield.zig
src/sourcemap/CodeCoverage.zig
src/sourcemap/JSSourceMap.zig
src/sourcemap/LineOffsetTable.zig
src/sourcemap/sourcemap.zig
src/sourcemap/VLQ.zig
src/sql/DataCell.zig
src/sql/postgres.zig
src/sql/postgres/postgres_protocol.zig
src/sql/postgres/postgres_types.zig
src/sql/postgres/AnyPostgresError.zig
src/sql/postgres/AuthenticationState.zig
src/sql/postgres/CommandTag.zig
src/sql/postgres/ConnectionFlags.zig
src/sql/postgres/Data.zig
src/sql/postgres/DataCell.zig
src/sql/postgres/DebugSocketMonitorReader.zig
src/sql/postgres/DebugSocketMonitorWriter.zig
src/sql/postgres/ObjectIterator.zig
src/sql/postgres/PostgresCachedStructure.zig
src/sql/postgres/PostgresProtocol.zig
src/sql/postgres/PostgresRequest.zig
src/sql/postgres/PostgresSQLConnection.zig
src/sql/postgres/PostgresSQLContext.zig
src/sql/postgres/PostgresSQLQuery.zig
src/sql/postgres/PostgresSQLQueryResultMode.zig
src/sql/postgres/PostgresSQLStatement.zig
src/sql/postgres/PostgresTypes.zig
src/sql/postgres/protocol/ArrayList.zig
src/sql/postgres/protocol/Authentication.zig
src/sql/postgres/protocol/BackendKeyData.zig
src/sql/postgres/protocol/Close.zig
src/sql/postgres/protocol/ColumnIdentifier.zig
src/sql/postgres/protocol/CommandComplete.zig
src/sql/postgres/protocol/CopyData.zig
src/sql/postgres/protocol/CopyFail.zig
src/sql/postgres/protocol/CopyInResponse.zig
src/sql/postgres/protocol/CopyOutResponse.zig
src/sql/postgres/protocol/DataRow.zig
src/sql/postgres/protocol/DecoderWrap.zig
src/sql/postgres/protocol/Describe.zig
src/sql/postgres/protocol/ErrorResponse.zig
src/sql/postgres/protocol/Execute.zig
src/sql/postgres/protocol/FieldDescription.zig
src/sql/postgres/protocol/FieldMessage.zig
src/sql/postgres/protocol/FieldType.zig
src/sql/postgres/protocol/NegotiateProtocolVersion.zig
src/sql/postgres/protocol/NewReader.zig
src/sql/postgres/protocol/NewWriter.zig
src/sql/postgres/protocol/NoticeResponse.zig
src/sql/postgres/protocol/NotificationResponse.zig
src/sql/postgres/protocol/ParameterDescription.zig
src/sql/postgres/protocol/ParameterStatus.zig
src/sql/postgres/protocol/Parse.zig
src/sql/postgres/protocol/PasswordMessage.zig
src/sql/postgres/protocol/PortalOrPreparedStatement.zig
src/sql/postgres/protocol/ReadyForQuery.zig
src/sql/postgres/protocol/RowDescription.zig
src/sql/postgres/protocol/SASLInitialResponse.zig
src/sql/postgres/protocol/SASLResponse.zig
src/sql/postgres/protocol/StackReader.zig
src/sql/postgres/protocol/StartupMessage.zig
src/sql/postgres/protocol/TransactionStatusIndicator.zig
src/sql/postgres/protocol/WriteWrap.zig
src/sql/postgres/protocol/zHelpers.zig
src/sql/postgres/QueryBindingIterator.zig
src/sql/postgres/SASL.zig
src/sql/postgres/Signature.zig
src/sql/postgres/SocketMonitor.zig
src/sql/postgres/SSLMode.zig
src/sql/postgres/Status.zig
src/sql/postgres/TLSStatus.zig
src/sql/postgres/types/bool.zig
src/sql/postgres/types/bytea.zig
src/sql/postgres/types/date.zig
src/sql/postgres/types/int_types.zig
src/sql/postgres/types/json.zig
src/sql/postgres/types/numeric.zig
src/sql/postgres/types/PostgresString.zig
src/sql/postgres/types/Tag.zig
src/StandaloneModuleGraph.zig
src/StaticHashMap.zig
src/string_immutable.zig
@@ -650,8 +650,13 @@ register_command(
    -DDOWNLOAD_PATH=${NODEJS_HEADERS_PATH}
    -DDOWNLOAD_URL=https://nodejs.org/dist/v${NODEJS_VERSION}/node-v${NODEJS_VERSION}-headers.tar.gz
    -P ${CWD}/cmake/scripts/DownloadUrl.cmake
  COMMAND
    ${CMAKE_COMMAND}
    -DNODE_INCLUDE_DIR=${NODEJS_HEADERS_PATH}/include
    -P ${CWD}/cmake/scripts/PrepareNodeHeaders.cmake
  OUTPUTS
    ${NODEJS_HEADERS_PATH}/include/node/node_version.h
    ${NODEJS_HEADERS_PATH}/include/.node-headers-prepared
)

list(APPEND BUN_CPP_SOURCES

@@ -763,6 +768,7 @@ target_include_directories(${bun} PRIVATE
  ${VENDOR_PATH}
  ${VENDOR_PATH}/picohttpparser
  ${NODEJS_HEADERS_PATH}/include
  ${NODEJS_HEADERS_PATH}/include/node
)

if(NOT WIN32)

@@ -945,14 +951,22 @@ endif()

if(APPLE)
  target_link_options(${bun} PUBLIC
    -dead_strip
    -dead_strip_dylibs
    -Wl,-ld_new
    -Wl,-no_compact_unwind
    -Wl,-stack_size,0x1200000
    -fno-keep-static-consts
    -Wl,-map,${bun}.linker-map
  )

  # don't strip in debug, this seems to be needed so that the Zig std library
  # `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
  # output executable
  if(NOT DEBUG)
    target_link_options(${bun} PUBLIC
      -dead_strip
      -dead_strip_dylibs
    )
  endif()
endif()

if(LINUX)

@@ -989,7 +1003,6 @@ if(LINUX)
  -Wl,-no-pie
  -Wl,-icf=safe
  -Wl,--as-needed
  -Wl,--gc-sections
  -Wl,-z,stack-size=12800000
  -Wl,--compress-debug-sections=zlib
  -Wl,-z,lazy

@@ -1005,6 +1018,15 @@ if(LINUX)
  -Wl,--build-id=sha1 # Better for debugging than default
  -Wl,-Map=${bun}.linker-map
)

# don't strip in debug, this seems to be needed so that the Zig std library
# `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
# output executable
if(NOT DEBUG)
  target_link_options(${bun} PUBLIC
    -Wl,--gc-sections
  )
endif()
endif()

# --- Symbols list ---

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION 397dafc9721b8f8046f9448abb6dbc14efe096d3)
  set(WEBKIT_VERSION 29bbdff0f94f362891f8e007ae2a73f9bc3e66d3)
endif()

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
@@ -261,6 +261,7 @@ _bun_pm_completion() {
      'hash-string\:"print the string used to hash the lockfile" '
      'hash-print\:"print the hash stored in the current lockfile" '
      'cache\:"print the path to the cache folder" '
      'version\:"bump the version in package.json and create a git tag" '
    )

    _alternative "args:cmd3:(($sub_commands))"

@@ -299,6 +300,40 @@ _bun_pm_completion() {
        $pmargs &&
        ret=0

      ;;
      version)
        version_args=(
          "patch[increment patch version]"
          "minor[increment minor version]"
          "major[increment major version]"
          "prepatch[increment patch version and add pre-release]"
          "preminor[increment minor version and add pre-release]"
          "premajor[increment major version and add pre-release]"
          "prerelease[increment pre-release version]"
          "from-git[use version from latest git tag]"
        )

        pmargs=(
          "--no-git-tag-version[don't create a git commit and tag]"
          "--allow-same-version[allow bumping to the same version]"
          "-m[use the given message for the commit]:message"
          "--message[use the given message for the commit]:message"
          "--preid[identifier to prefix pre-release versions]:preid"
        )

        _arguments -s -C \
          '1: :->cmd' \
          '2: :->cmd2' \
          '3: :->increment' \
          $pmargs &&
          ret=0

        case $state in
          increment)
            _alternative "args:increment:(($version_args))"
          ;;
        esac

      ;;
    esac
@@ -326,7 +326,11 @@ Bun.serve({

### HTML imports

To add a client-side single-page app, you can use an HTML import:
Bun supports importing HTML files directly into your server code, enabling full-stack applications with both server-side and client-side code. HTML imports work in two modes:

**Development (`bun --hot`):** Assets are bundled on-demand at runtime, enabling hot module replacement (HMR) for a fast, iterative development experience. When you change your frontend code, the browser automatically updates without a full page reload.

**Production (`bun build`):** When building with `bun build --target=bun`, the `import index from "./index.html"` statement resolves to a pre-built manifest object containing all bundled client assets. `Bun.serve` consumes this manifest to serve optimized assets with zero runtime bundling overhead. This is ideal for deploying to production.

```ts
import myReactSinglePageApp from "./index.html";

@@ -338,9 +342,9 @@ Bun.serve({
});
```

HTML imports don't just serve HTML. It's a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler and CSS parser.
HTML imports don't just serve HTML — it's a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler and CSS parser. You can use this to build full-featured frontends with React, TypeScript, Tailwind CSS, and more.

You can use this to build a full-featured frontend with React, TypeScript, Tailwind CSS, and more. Check out [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack) to learn more.
For a complete guide on building full-stack applications with HTML imports, including detailed examples and best practices, see [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack).

### Practical example: REST API
@@ -160,7 +160,8 @@ const writer = s3file.writer({
  partSize: 5 * 1024 * 1024,
});
for (let i = 0; i < 10; i++) {
  await writer.write(bigFile);
  writer.write(bigFile);
  await writer.flush();
}
await writer.end();
```
@@ -34,7 +34,7 @@ const proc = Bun.spawn(["cat"], {
|
||||
),
|
||||
});
|
||||
|
||||
const text = await new Response(proc.stdout).text();
|
||||
const text = await proc.stdout.text();
|
||||
console.log(text); // "const input = "hello world".repeat(400); ..."
|
||||
```
|
||||
|
||||
@@ -113,14 +113,34 @@ proc.stdin.flush();
|
||||
proc.stdin.end();
|
||||
```
|
||||
|
||||
Passing a `ReadableStream` to `stdin` lets you pipe data from a JavaScript `ReadableStream` directly to the subprocess's input:
|
||||
|
||||
```ts
|
||||
const stream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue("Hello from ");
|
||||
controller.enqueue("ReadableStream!");
|
||||
controller.close();
|
||||
},
|
||||
});
|
||||
|
||||
const proc = Bun.spawn(["cat"], {
|
||||
stdin: stream,
|
||||
stdout: "pipe",
|
||||
});
|
||||
|
||||
const output = await new Response(proc.stdout).text();
|
||||
console.log(output); // "Hello from ReadableStream!"
|
||||
```
|
||||
|
||||
## Output streams
|
||||
|
||||
You can read results from the subprocess via the `stdout` and `stderr` properties. By default these are instances of `ReadableStream`.
|
||||
|
||||
```ts
|
||||
const proc = Bun.spawn(["bun", "--version"]);
|
||||
const text = await new Response(proc.stdout).text();
|
||||
console.log(text); // => "$BUN_LATEST_VERSION"
|
||||
const text = await proc.stdout.text();
|
||||
console.log(text); // => "$BUN_LATEST_VERSION\n"
|
||||
```
|
||||
|
||||
Configure the output stream by passing one of the following values to `stdout/stderr`:
|
||||
|
||||
@@ -126,6 +126,81 @@ The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that erro

The `--bytecode` argument enables bytecode compilation. Every time you run JavaScript code in Bun, JavaScriptCore (the engine) will compile your source code into bytecode. We can move this parsing work from runtime to bundle time, saving you startup time.
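For example, `--bytecode` composes with the other `bun build --compile` flags described in this section (the entrypoint and output names here are illustrative):

```sh
bun build --compile --minify --sourcemap --bytecode ./index.ts --outfile myapp
```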
## Full-stack executables

{% note %}

New in Bun v1.2.17

{% /note %}

Bun's `--compile` flag can create standalone executables that contain both server and client code, making it ideal for full-stack applications. When Bun sees an HTML import in your server code, it kicks off a frontend build process that bundles all frontend assets (JavaScript, CSS, etc.) and embeds them into the executable.

{% codetabs %}

```ts#server.ts
import { serve } from "bun";
import index from "./index.html";

const server = serve({
  routes: {
    "/": index,
    "/api/hello": { GET: () => Response.json({ message: "Hello from API" }) },
  },
});

console.log(`Server running at http://localhost:${server.port}`);
```

```html#index.html
<!DOCTYPE html>
<html>
  <head>
    <title>My App</title>
    <link rel="stylesheet" href="./styles.css">
  </head>
  <body>
    <h1>Hello World</h1>
    <script src="./app.js"></script>
  </body>
</html>
```

```js#app.js
console.log("Hello from the client!");
```

```css#styles.css
body {
  background-color: #f0f0f0;
}
```

{% /codetabs %}

To build this into a single executable:

```sh
bun build --compile ./server.ts --outfile myapp
```

This creates a self-contained binary that includes:

- Your server code
- The Bun runtime
- All frontend assets (HTML, CSS, JavaScript)
- Any npm packages used by your server

The result is a single file that can be deployed anywhere without needing Node.js, Bun, or any dependencies installed. Just run:

```sh
./myapp
```

Bun automatically handles serving the frontend assets with proper MIME types and cache headers. The HTML import is replaced with a manifest object that `Bun.serve` uses to efficiently serve pre-bundled assets.

For more details on building full-stack applications with Bun, see the [full-stack guide](/docs/bundler/fullstack).

## Worker

To use workers in a standalone executable, add the worker's entrypoint to the CLI arguments:
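A sketch of what that looks like (`./my-worker.ts` is an illustrative path; the server code would reference it with `new Worker("./my-worker.ts")`):

```sh
bun build --compile ./server.ts ./my-worker.ts --outfile myapp
```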
@@ -174,7 +249,7 @@ $ ./hello

Standalone executables support embedding files.

To embed files into an executable with `bun build --compile`, import the file in your code
To embed files into an executable with `bun build --compile`, import the file in your code.

```ts
// this becomes an internal file path

@@ -353,5 +428,4 @@ Currently, the `--compile` flag can only accept a single entrypoint at a time an

- `--splitting`
- `--public-path`
- `--target=node` or `--target=browser`
- `--format` - always outputs a binary executable. Internally, it's almost esm.
- `--no-bundle` - we always bundle everything into the executable.
@@ -1,5 +1,3 @@
Using `Bun.serve()`'s `routes` option, you can run your frontend and backend in the same app with no extra steps.

To get started, import HTML files and pass them to the `routes` option in `Bun.serve()`.

```ts

@@ -234,7 +232,92 @@ When `console: true` is set, Bun will stream console logs from the browser to th

#### Production mode

When serving your app in production, set `development: false` in `Bun.serve()`.
Hot reloading and `development: true` help you iterate quickly, but in production, your server should be as fast as possible and have as few external dependencies as possible.

##### Ahead of time bundling (recommended)

As of Bun v1.2.17, you can use `Bun.build` or `bun build` to bundle your full-stack application ahead of time.

```sh
$ bun build --target=bun --production --outdir=dist ./src/index.ts
```

When Bun's bundler sees an HTML import from server-side code, it will bundle the referenced JavaScript/TypeScript/TSX/JSX and CSS files into a manifest object that Bun.serve() can use to serve the assets.

```ts
import { serve } from "bun";
import index from "./index.html";

serve({
  routes: { "/": index },
});
```

{% details summary="Internally, the `index` variable is a manifest object that looks something like this" %}

```json
{
  "index": "./index.html",
  "files": [
    {
      "input": "index.html",
      "path": "./index-f2me3qnf.js",
      "loader": "js",
      "isEntry": true,
      "headers": {
        "etag": "eet6gn75",
        "content-type": "text/javascript;charset=utf-8"
      }
    },
    {
      "input": "index.html",
      "path": "./index.html",
      "loader": "html",
      "isEntry": true,
      "headers": {
        "etag": "r9njjakd",
        "content-type": "text/html;charset=utf-8"
      }
    },
    {
      "input": "index.html",
      "path": "./index-gysa5fmk.css",
      "loader": "css",
      "isEntry": true,
      "headers": {
        "etag": "50zb7x61",
        "content-type": "text/css;charset=utf-8"
      }
    },
    {
      "input": "logo.svg",
      "path": "./logo-kygw735p.svg",
      "loader": "file",
      "isEntry": false,
      "headers": {
        "etag": "kygw735p",
        "content-type": "application/octet-stream"
      }
    },
    {
      "input": "react.svg",
      "path": "./react-ck11dneg.svg",
      "loader": "file",
      "isEntry": false,
      "headers": {
        "etag": "ck11dneg",
        "content-type": "application/octet-stream"
      }
    }
  ]
}
```

{% /details %}

##### Runtime bundling

When adding a build step is too complicated, you can set `development: false` in `Bun.serve()` (a sketch follows this list). This will:

- Enable in-memory caching of bundled assets. Bun will bundle assets lazily on the first request to an `.html` file, and cache the result in memory until the server restarts.
- Enable `Cache-Control` and `ETag` headers.
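A minimal sketch of that setup (file and route names are illustrative):

```ts
import { serve } from "bun";
import index from "./index.html";

serve({
  development: false, // bundle lazily on first request, then cache in memory
  routes: { "/": index },
});
```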
@@ -298,7 +381,6 @@ Note: this is currently in `bunfig.toml` to make it possible to know statically
Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<link>` tags in HTML files, uses them as entrypoints for [Bun's bundler](/docs/bundler), generates an optimized bundle for the JavaScript/TypeScript/TSX/JSX and CSS files, and serves the result.

1. **`<script>` processing**

   - Transpiles TypeScript, JSX, and TSX in `<script>` tags
   - Bundles imported dependencies
   - Generates sourcemaps for debugging

@@ -309,7 +391,6 @@ Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<
   ```

2. **`<link>` processing**

   - Processes CSS imports and `<link>` tags
   - Concatenates CSS files
   - Rewrites `url` and asset paths to include content-addressable hashes in URLs

@@ -319,18 +400,15 @@ Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<
   ```

3. **`<img>` & asset processing**

   - Links to assets are rewritten to include content-addressable hashes in URLs
   - Small assets in CSS files are inlined into `data:` URLs, reducing the total number of HTTP requests sent over the wire

4. **Rewrite HTML**

   - Combines all `<script>` tags into a single `<script>` tag with a content-addressable hash in the URL
   - Combines all `<link>` tags into a single `<link>` tag with a content-addressable hash in the URL
   - Outputs a new HTML file

5. **Serve**

   - All the output files from the bundler are exposed as static routes, using the same mechanism internally as when you pass a `Response` object to [`static` in `Bun.serve()`](/docs/api/http#static-routes).

This works similarly to how [`Bun.build` processes HTML files](/docs/bundler/html).
@@ -26,6 +26,7 @@ The bundler is a key piece of infrastructure in the JavaScript ecosystem. As a b

- **Reducing HTTP requests.** A single package in `node_modules` may consist of hundreds of files, and large applications may have dozens of such dependencies. Loading each of these files with a separate HTTP request becomes untenable very quickly, so bundlers are used to convert our application source code into a smaller number of self-contained "bundles" that can be loaded with a single request.
- **Code transforms.** Modern apps are commonly built with languages or tools like TypeScript, JSX, and CSS modules, all of which must be converted into plain JavaScript and CSS before they can be consumed by a browser. The bundler is the natural place to configure these transformations.
- **Framework features.** Frameworks rely on bundler plugins & code transformations to implement common patterns like file-system routing, client-server code co-location (think `getServerSideProps` or Remix loaders), and server components.
- **Full-stack Applications.** Bun's bundler can handle both server and client code in a single command, enabling optimized production builds and single-file executables. With build-time HTML imports, you can bundle your entire application — frontend assets and backend server — into a single deployable unit.

Let's jump into the bundler API.

@@ -324,7 +325,7 @@ Depending on the target, Bun will apply different module resolution rules and op

---

- `bun`
  - For generating bundles that are intended to be run by the Bun runtime. In many cases, it isn't necessary to bundle server-side code; you can directly execute the source code without modification. However, bundling your server code can reduce startup times and improve running performance.
  - For generating bundles that are intended to be run by the Bun runtime. In many cases, it isn't necessary to bundle server-side code; you can directly execute the source code without modification. However, bundling your server code can reduce startup times and improve running performance. This is the target to use for building full-stack applications with build-time HTML imports, where both server and client code are bundled together.

All bundles generated with `target: "bun"` are marked with a special `// @bun` pragma, which indicates to the Bun runtime that there's no need to re-transpile the file before execution.
@@ -137,7 +137,7 @@ console.log(contents); // => "Hello, world!"
import html from "./index.html" with { type: "text" };
```

When referenced during a build, the contents are into the bundle as a string.
When referenced during a build, the contents are inlined into the bundle as a string.

```ts
var contents = `Hello, world!`;

@@ -262,6 +262,20 @@ Currently, the list of selectors is:
- `video[poster]`
- `video[src]`

{% callout %}

**HTML Loader Behavior in Different Contexts**

The `html` loader behaves differently depending on how it's used:

1. **Static Build:** When you run `bun build ./index.html`, Bun produces a static site with all assets bundled and hashed.

2. **Runtime:** When you run `bun run server.ts` (where `server.ts` imports an HTML file), Bun bundles assets on-the-fly during development, enabling features like hot module replacement.

3. **Full-stack Build:** When you run `bun build --target=bun server.ts` (where `server.ts` imports an HTML file), the import resolves to a manifest object that `Bun.serve` uses to efficiently serve pre-bundled assets in production.

{% /callout %}

### `sh` loader

**Bun Shell loader**. Default for `.sh` files
@@ -125,7 +125,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot

- `--target`
- n/a
- No supported. Bun's bundler performs no syntactic down-leveling at this time.
- Not supported. Bun's bundler performs no syntactic down-leveling at this time.

---
@@ -308,14 +308,12 @@ IF remote template

1. GET `registry.npmjs.org/@bun-examples/${template}/latest` and parse it
2. GET `registry.npmjs.org/@bun-examples/${template}/-/${template}-${latestVersion}.tgz`
3. Decompress & extract `${template}-${latestVersion}.tgz` into `${destination}`

- If there are files that would overwrite, warn and exit unless `--force` is passed

IF GitHub repo

1. Download the tarball from GitHub’s API
2. Decompress & extract into `${destination}`

- If there are files that would overwrite, warn and exit unless `--force` is passed

ELSE IF local template

@@ -333,7 +331,6 @@ ELSE IF local template

7. Run `${npmClient} install` unless `--no-install` is passed OR no dependencies are in package.json
8. Run any tasks defined in `"bun-create": { "postinstall" }` with the npm client
9. Run `git init; git add -A .; git commit -am "Initial Commit";`

- Rename `gitignore` to `.gitignore`. NPM automatically removes `.gitignore` files from appearing in packages.
- If there are dependencies, this runs in a separate thread concurrently while node_modules are being installed
- Using libgit2 if available was tested and performed 3x slower in microbenchmarks
@@ -28,7 +28,7 @@ $ bun install --filter '!pkg-c'
$ bun install --filter './packages/*'

# Same as above, but exclude the root package.json
$ bun install --filter --filter '!./' --filter './packages/*'
$ bun install --filter '!./' --filter './packages/*'
```

Similarly, `bun outdated` will display outdated dependencies for all packages in the monorepo, and `--filter` can be used to restrict the command to a subset of the packages:
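For example (the package glob is illustrative):

```sh
$ bun outdated --filter './packages/*'
```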
@@ -151,3 +151,45 @@ $ bun pm default-trusted
```

See the current list on GitHub [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt).

## version

To display the current package version and help:

```bash
$ bun pm version
bun pm version v$BUN_LATEST_VERSION (ca7428e9)
Current package version: v1.0.0

Increment:
  patch      1.0.0 → 1.0.1
  minor      1.0.0 → 1.1.0
  major      1.0.0 → 2.0.0
  prerelease 1.0.0 → 1.0.1-0
  prepatch   1.0.0 → 1.0.1-0
  preminor   1.0.0 → 1.1.0-0
  premajor   1.0.0 → 2.0.0-0
  from-git   Use version from latest git tag
  1.2.3      Set specific version

Options:
  --no-git-tag-version  Skip git operations
  --allow-same-version  Prevents throwing error if version is the same
  --message=<val>, -m   Custom commit message, use %s for version substitution
  --preid=<val>         Prerelease identifier (i.e beta → 1.0.1-beta.0)
  --force, -f           Bypass dirty git history check

Examples:
  $ bun pm version patch
  $ bun pm version 1.2.3 --no-git-tag-version
  $ bun pm version prerelease --preid beta --message "Release beta: %s"
```

To bump the version in `package.json`:

```bash
$ bun pm version patch
v1.0.1
```

Supports `patch`, `minor`, `major`, `premajor`, `preminor`, `prepatch`, `prerelease`, `from-git`, or a specific version like `1.2.3`. By default it creates a git commit and tag; pass `--no-git-tag-version` to skip that.
@@ -30,7 +30,7 @@ $ bun add @prisma/client

We'll use the Prisma CLI with `bunx` to initialize our schema and migration directory. For simplicity we'll be using an in-memory SQLite database.

```bash
$ bunx prisma init --datasource-provider sqlite
$ bunx --bun prisma init --datasource-provider sqlite
```

---
@@ -13,14 +13,14 @@ proc.stderr; // => ReadableStream

---

To read `stderr` until the child process exits, use the [`Bun.readableStreamToText()`](https://bun.sh/docs/api/utils#bun-readablestreamto) convenience function.
To read `stderr` until the child process exits, use `.text()`:

```ts
const proc = Bun.spawn(["echo", "hello"], {
  stderr: "pipe",
});

const errors: string = await Bun.readableStreamToText(proc.stderr);
const errors: string = await proc.stderr.text();
if (errors) {
  // handle errors
}

@@ -7,7 +7,7 @@ When using [`Bun.spawn()`](https://bun.sh/docs/api/spawn), the `stdout` of the c

```ts
const proc = Bun.spawn(["echo", "hello"]);

const output = await new Response(proc.stdout).text();
const output = await proc.stdout.text();
output; // => "hello"
```

@@ -32,8 +32,8 @@ By default, the `stdout` of the child process can be consumed as a `ReadableStre

```ts
const proc = Bun.spawn(["echo", "hello"]);

const output = await new Response(proc.stdout).text();
output; // => "hello"
const output = await proc.stdout.text();
output; // => "hello\n"
```

---
@@ -46,7 +46,7 @@ test

The `snap.test.ts.snap` file is a JavaScript file that exports a serialized version of the value passed into `expect()`. The `{foo: "bar"}` object has been serialized to JSON.

```js
// Bun Snapshot v1, https://goo.gl/fbAQLP
// Bun Snapshot v1, https://bun.sh/docs/test/snapshots

exports[`snapshot 1`] = `
{
@@ -353,6 +353,8 @@ dryRun = false

To configure the directory where Bun puts globally installed packages.

Environment variable: `BUN_INSTALL_GLOBAL_DIR`

```toml
[install]
# where `bun install --global` installs packages

@@ -363,6 +365,8 @@ globalDir = "~/.bun/install/global"

To configure the directory where Bun installs globally installed binaries and CLIs.

Environment variable: `BUN_INSTALL_BIN`

```toml
# where globally-installed package bins are linked
globalBinDir = "~/.bun/bin"
@@ -206,13 +206,11 @@ Understanding how `mock.module()` works helps you use it more effectively:

2. **Lazy Evaluation**: The mock factory callback is only evaluated when the module is actually imported or required.

3. **Path Resolution**: Bun automatically resolves the module specifier as though you were doing an import, supporting:

   - Relative paths (`'./module'`)
   - Absolute paths (`'/path/to/module'`)
   - Package names (`'lodash'`)

4. **Import Timing Effects**:

   - When mocking before first import: No side effects from the original module occur
   - When mocking after import: The original module's side effects have already happened
   - For this reason, using `--preload` is recommended for mocks that need to prevent side effects
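A short sketch tying these together (the module path and its shape are hypothetical):

```ts
import { mock, test, expect } from "bun:test";

// Register the mock before the module is first imported so the real
// module's side effects never run (see "Import Timing Effects" above).
mock.module("./config", () => {
  return { apiUrl: "http://localhost:3000" };
});

test("uses the mocked config", async () => {
  const { apiUrl } = await import("./config");
  expect(apiUrl).toBe("http://localhost:3000");
});
```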
@@ -12,7 +12,7 @@ const C = bun.C;
const clap = @import("../src/deps/zig-clap/clap.zig");

const URL = @import("../src/url.zig").URL;
const Method = @import("../src/http/method.zig").Method;
const Method = @import("../src/http/Method.zig").Method;
const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType;
const HeadersTuple = ColonListType(string, noop_resolver);
const path_handler = @import("../src/resolver/resolve_path.zig");

@@ -14,7 +14,7 @@ const clap = @import("../src/deps/zig-clap/clap.zig");

const URL = @import("../src/url.zig").URL;
const Headers = bun.http.Headers;
const Method = @import("../src/http/method.zig").Method;
const Method = @import("../src/http/Method.zig").Method;
const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType;
const HeadersTuple = ColonListType(string, noop_resolver);
const path_handler = @import("../src/resolver/resolve_path.zig");
@@ -1,13 +1,12 @@
{
  "private": true,
  "name": "bun",
  "version": "1.2.17",
  "version": "1.2.19",
  "workspaces": [
    "./packages/bun-types",
    "./packages/@types/bun"
  ],
  "devDependencies": {
    "@types/react": "^18.3.3",
    "esbuild": "^0.21.4",
    "mitata": "^0.1.11",
    "peechy": "0.4.34",

@@ -24,7 +23,8 @@
  },
  "scripts": {
    "build": "bun run build:debug",
    "watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib -Doverride-no-export-cpp-apis=true",
    "ci": "bun scripts/buildkite-failures.ts ",
    "watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
    "bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
    "bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",

@@ -74,7 +74,7 @@
    "clang-tidy:diff": "bun run analysis --target clang-tidy-diff",
    "zig-format": "bun run analysis:no-llvm --target zig-format",
    "zig-format:check": "bun run analysis:no-llvm --target zig-format-check",
    "prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
    "prettier": "bunx --bun prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
    "node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
    "node:test:cp": "bun ./scripts/fetch-node-test.ts ",
    "clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
@@ -1,4 +1,4 @@
// Bun Snapshot v1, https://goo.gl/fbAQLP
// Bun Snapshot v1, https://bun.sh/docs/test/snapshots

exports[`runTests() can run all tests 1`] = `
{

@@ -124,7 +124,7 @@ const argv0 = argv0_stdout.toString().trim();

console.log(`Testing ${argv0} v${revision}`);

const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h";
const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.26100.0\\shared\\ntstatus.h";
let ntstatus_header_cache = null;
function lookupWindowsError(code) {
  if (ntstatus_header_cache === null) {

@@ -1,4 +1,4 @@
// Bun Snapshot v1, https://goo.gl/fbAQLP
// Bun Snapshot v1, https://bun.sh/docs/test/snapshots

exports[`Bun.plugin using { forceSide: 'server' } allows for imported components to be SSR'd: foo.svelte - head 1`] = `""`;
@@ -65,13 +65,11 @@ Note: The order of references in `index.d.ts` is important - `bun.ns.d.ts` must

### Best Practices

1. **Type Safety**

   - Please use strict types instead of `any` where possible
   - Leverage TypeScript's type system features (generics, unions, etc.)
   - Document complex types with JSDoc comments

2. **Compatibility**

   - Use `Bun.__internal.UseLibDomIfAvailable<LibDomName extends string, OurType>` for types that might conflict with lib.dom.d.ts (see [`./fetch.d.ts`](./fetch.d.ts) for a real example)
   - `@types/node` often expects variables to always be defined (this was the biggest cause of most of the conflicts in the past!), so we use the `UseLibDomIfAvailable` type to make sure we don't overwrite `lib.dom.d.ts` but still provide Bun types while simultaneously declaring that the variable exists (for Node to work) in the cases where we can.
packages/bun-types/bun.d.ts (vendored, 584 changed lines)
@@ -45,6 +45,7 @@ declare module "bun" {
|
||||
type DOMHighResTimeStamp = number;
|
||||
type EventListenerOrEventListenerObject = EventListener | EventListenerObject;
|
||||
type BlobOrStringOrBuffer = string | NodeJS.TypedArray | ArrayBufferLike | Blob;
|
||||
type MaybePromise<T> = T | Promise<T>;
|
||||
|
||||
namespace __internal {
|
||||
type LibDomIsLoaded = typeof globalThis extends { onabort: any } ? true : false;
|
||||
@@ -852,6 +853,8 @@ declare module "bun" {
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks or the concatenated chunks as a {@link Uint8Array}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.bytes}
|
||||
*/
|
||||
function readableStreamToBytes(
|
||||
stream: ReadableStream<ArrayBufferView | ArrayBufferLike>,
|
||||
@@ -864,6 +867,8 @@ declare module "bun" {
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link Blob}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.blob}
|
||||
*/
|
||||
function readableStreamToBlob(stream: ReadableStream): Promise<Blob>;
|
||||
|
||||
@@ -906,6 +911,8 @@ declare module "bun" {
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link String}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.text}
|
||||
*/
|
||||
function readableStreamToText(stream: ReadableStream): Promise<string>;
|
||||
|
||||
@@ -916,6 +923,8 @@ declare module "bun" {
|
||||
*
|
||||
* @param stream The stream to consume.
|
||||
* @returns A promise that resolves with the concatenated chunks as a {@link String}.
|
||||
*
|
||||
* @deprecated Use {@link ReadableStream.json}
|
||||
*/
|
||||
function readableStreamToJSON(stream: ReadableStream): Promise<any>;
|
||||
|
||||
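A sketch of the migration these deprecations point at (Bun exposes instance helpers such as `text()`, `json()`, `bytes()`, and `blob()` on `ReadableStream`):

```ts
const stream = new Response("hello").body!;

// Deprecated helper:
//   const text = await Bun.readableStreamToText(stream);

// Preferred instance method (consumes the stream):
const text = await stream.text();
```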
@@ -1242,9 +1251,9 @@ declare module "bun" {
|
||||
*/
|
||||
writer(options?: { highWaterMark?: number }): FileSink;
|
||||
|
||||
readonly readable: ReadableStream;
|
||||
|
||||
// TODO: writable: WritableStream;
|
||||
// TODO
|
||||
// readonly readable: ReadableStream<Uint8Array>;
|
||||
// readonly writable: WritableStream<Uint8Array>;
|
||||
|
||||
/**
|
||||
* A UNIX timestamp indicating when the file was last modified.
|
||||
@@ -1303,116 +1312,285 @@ declare module "bun" {
|
||||
stat(): Promise<import("node:fs").Stats>;
|
||||
}
|
||||
|
||||
/**
 * Configuration options for SQL client connection and behavior
 * @example
 * const config: SQLOptions = {
 *   host: 'localhost',
 *   port: 5432,
 *   user: 'dbuser',
 *   password: 'secretpass',
 *   database: 'myapp',
 *   idleTimeout: 30,
 *   max: 20,
 *   onconnect: (client) => {
 *     console.log('Connected to database');
 *   }
 * };
 */
namespace SQL {
  type AwaitPromisesArray<T extends Array<PromiseLike<any>>> = {
    [K in keyof T]: Awaited<T[K]>;
  };

  interface SQLOptions {
    /** Connection URL (can be string or URL object) */
    url?: URL | string;
    /** Database server hostname */
    host?: string;
    /** Database server hostname (alias for host) */
    hostname?: string;
    /** Database server port number */
    port?: number | string;
    /** Database user for authentication */
    username?: string;
    /** Database user for authentication (alias for username) */
    user?: string;
    /** Database password for authentication */
    password?: string | (() => Promise<string>);
    /** Database password for authentication (alias for password) */
    pass?: string | (() => Promise<string>);
    /** Name of the database to connect to */
    database?: string;
    /** Name of the database to connect to (alias for database) */
    db?: string;
    /** Database adapter/driver to use */
    adapter?: string;
    /** Maximum time in seconds to wait for connection to become available */
    idleTimeout?: number;
    /** Maximum time in seconds to wait for connection to become available (alias for idleTimeout) */
    idle_timeout?: number;
    /** Maximum time in seconds to wait when establishing a connection */
    connectionTimeout?: number;
    /** Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout) */
    connection_timeout?: number;
    /** Maximum lifetime in seconds of a connection */
    maxLifetime?: number;
    /** Maximum lifetime in seconds of a connection (alias for maxLifetime) */
    max_lifetime?: number;
    /** Whether to use TLS/SSL for the connection */
    tls?: TLSOptions | boolean;
    /** Whether to use TLS/SSL for the connection (alias for tls) */
    ssl?: TLSOptions | boolean;
    /** Callback function executed when a connection is established */
    onconnect?: (client: SQL) => void;
    /** Callback function executed when a connection is closed */
    onclose?: (client: SQL) => void;
    /** Maximum number of connections in the pool */
    max?: number;
    /** By default values outside i32 range are returned as strings. If this is true, values outside i32 range are returned as BigInts. */
    bigint?: boolean;
    /** Automatic creation of prepared statements, defaults to true */
    prepare?: boolean;
  type ContextCallbackResult<T> = T extends Array<PromiseLike<any>> ? AwaitPromisesArray<T> : Awaited<T>;
  type ContextCallback<T, SQL> = (sql: SQL) => Promise<T>;

  /**
   * Configuration options for SQL client connection and behavior
   *
   * @example
   * ```ts
   * const config: Bun.SQL.Options = {
   *   host: 'localhost',
   *   port: 5432,
   *   user: 'dbuser',
   *   password: 'secretpass',
   *   database: 'myapp',
   *   idleTimeout: 30,
   *   max: 20,
   *   onconnect: (client) => {
   *     console.log('Connected to database');
   *   }
   * };
   * ```
   */
  interface Options {
    /**
     * Connection URL (can be string or URL object)
     */
    url?: URL | string | undefined;

    /**
     * Database server hostname
     * @default "localhost"
     */
    host?: string | undefined;

    /**
     * Database server hostname (alias for host)
     * @deprecated Prefer {@link host}
     * @default "localhost"
     */
    hostname?: string | undefined;

    /**
     * Database server port number
     * @default 5432
     */
    port?: number | string | undefined;

    /**
     * Database user for authentication
     * @default "postgres"
     */
    username?: string | undefined;

    /**
     * Database user for authentication (alias for username)
     * @deprecated Prefer {@link username}
     * @default "postgres"
     */
    user?: string | undefined;

    /**
     * Database password for authentication
     * @default ""
     */
    password?: string | (() => MaybePromise<string>) | undefined;

    /**
     * Database password for authentication (alias for password)
     * @deprecated Prefer {@link password}
     * @default ""
     */
    pass?: string | (() => MaybePromise<string>) | undefined;

    /**
     * Name of the database to connect to
     * @default The username value
     */
    database?: string | undefined;

    /**
     * Name of the database to connect to (alias for database)
     * @deprecated Prefer {@link database}
     * @default The username value
     */
    db?: string | undefined;

    /**
     * Database adapter/driver to use
     * @default "postgres"
     */
    adapter?: "postgres" /*| "sqlite" | "mysql"*/ | (string & {}) | undefined;

    /**
     * Maximum time in seconds to wait for connection to become available
     * @default 0 (no timeout)
     */
    idleTimeout?: number | undefined;

    /**
     * Maximum time in seconds to wait for connection to become available (alias for idleTimeout)
     * @deprecated Prefer {@link idleTimeout}
     * @default 0 (no timeout)
     */
    idle_timeout?: number | undefined;

    /**
     * Maximum time in seconds to wait when establishing a connection
     * @default 30
     */
    connectionTimeout?: number | undefined;

    /**
     * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
     * @deprecated Prefer {@link connectionTimeout}
     * @default 30
     */
    connection_timeout?: number | undefined;

    /**
     * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
     * @deprecated Prefer {@link connectionTimeout}
     * @default 30
     */
    connectTimeout?: number | undefined;

    /**
     * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
     * @deprecated Prefer {@link connectionTimeout}
     * @default 30
     */
    connect_timeout?: number | undefined;

    /**
     * Maximum lifetime in seconds of a connection
     * @default 0 (no maximum lifetime)
     */
    maxLifetime?: number | undefined;

    /**
     * Maximum lifetime in seconds of a connection (alias for maxLifetime)
     * @deprecated Prefer {@link maxLifetime}
     * @default 0 (no maximum lifetime)
     */
    max_lifetime?: number | undefined;

    /**
     * Whether to use TLS/SSL for the connection
     * @default false
     */
    tls?: TLSOptions | boolean | undefined;

    /**
     * Whether to use TLS/SSL for the connection (alias for tls)
     * @default false
     */
    ssl?: TLSOptions | boolean | undefined;

    // `.path` is currently unsupported in Bun, the implementation is incomplete.
    //
    // /**
    //  * Unix domain socket path for connection
    //  * @default ""
    //  */
    // path?: string | undefined;

    /**
     * Callback function executed when a connection is established
     */
    onconnect?: ((client: SQL) => void) | undefined;

    /**
     * Callback function executed when a connection is closed
     */
    onclose?: ((client: SQL) => void) | undefined;

    /**
     * Postgres client runtime configuration options
     *
     * @see https://www.postgresql.org/docs/current/runtime-config-client.html
     */
    connection?: Record<string, string | boolean | number> | undefined;

    /**
     * Maximum number of connections in the pool
     * @default 10
     */
    max?: number | undefined;

    /**
     * By default values outside i32 range are returned as strings. If this is true, values outside i32 range are returned as BigInts.
     * @default false
     */
    bigint?: boolean | undefined;

    /**
     * Automatic creation of prepared statements
     * @default true
     */
    prepare?: boolean | undefined;
  }
  /**
   * Represents a SQL query that can be executed, with additional control methods
   * Extends Promise to allow for async/await usage
   */
  interface Query<T> extends Promise<T> {
    /**
     * Indicates if the query is currently executing
     */
    active: boolean;

    /**
     * Indicates if the query has been cancelled
     */
    cancelled: boolean;

    /**
     * Cancels the executing query
     */
    cancel(): Query<T>;

    /**
     * Executes the query as a simple query, no parameters are allowed but can execute multiple commands separated by semicolons
     */
    simple(): Query<T>;

    /**
     * Executes the query
     */
    execute(): Query<T>;

    /**
     * Returns the raw query result
     */
    raw(): Query<T>;

    /**
     * Returns only the values from the query result
     */
    values(): Query<T>;
  }
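A hedged usage sketch of these control methods (assumes Bun's exported `sql` instance and a hypothetical `users` table; per the surrounding docs, a query built from the template tag does not run until it is awaited or `execute()` is called):

```ts
import { sql } from "bun";

const query = sql`select * from users`;
const rows = await query.values(); // rows as arrays of values rather than objects

const slow = sql`select pg_sleep(30)`.execute(); // start running eagerly
slow.cancel(); // abort the in-flight query
```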
  /**
   * Callback function type for transaction contexts
   * @param sql Function to execute SQL queries within the transaction
   */
  type TransactionContextCallback<T> = ContextCallback<T, TransactionSQL>;

  /**
   * Callback function type for savepoint contexts
   * @param sql Function to execute SQL queries within the savepoint
   */
  type SavepointContextCallback<T> = ContextCallback<T, SavepointSQL>;

  /**
   * SQL.Helper represents a parameter or serializable
   * value inside of a query.
   *
   * @example
   * ```ts
   * const helper = sql(users, 'id');
   * await sql`insert into users ${helper}`;
   * ```
   */
  interface Helper<T> {
    readonly value: T[];
    readonly columns: (keyof T)[];
  }
}
/**
 * Represents a SQL query that can be executed, with additional control methods
 * Extends Promise to allow for async/await usage
 */
interface SQLQuery<T = any> extends Promise<T> {
  /** Indicates if the query is currently executing */
  active: boolean;

  /** Indicates if the query has been cancelled */
  cancelled: boolean;

  /** Cancels the executing query */
  cancel(): SQLQuery<T>;

  /** Execute as a simple query, no parameters are allowed but can execute multiple commands separated by semicolons */
  simple(): SQLQuery<T>;

  /** Executes the query */
  execute(): SQLQuery<T>;

  /** Returns the raw query result */
  raw(): SQLQuery<T>;

  /** Returns only the values from the query result */
  values(): SQLQuery<T>;
}

/**
 * Callback function type for transaction contexts
 * @param sql Function to execute SQL queries within the transaction
 */
type SQLTransactionContextCallback = (sql: TransactionSQL) => Promise<any> | Array<SQLQuery>;
/**
 * Callback function type for savepoint contexts
 * @param sql Function to execute SQL queries within the savepoint
 */
type SQLSavepointContextCallback = (sql: SavepointSQL) => Promise<any> | Array<SQLQuery>;

/**
 * Main SQL client interface providing connection and transaction management
 */
interface SQL {
interface SQL extends AsyncDisposable {
  /**
   * Executes a SQL query using template literals
   * @example
@@ -1420,7 +1598,12 @@ declare module "bun" {
|
||||
* const [user] = await sql`select * from users where id = ${1}`;
|
||||
* ```
|
||||
*/
|
||||
(strings: string[] | TemplateStringsArray, ...values: any[]): SQLQuery;
|
||||
<T = any>(strings: TemplateStringsArray, ...values: unknown[]): SQL.Query<T>;
|
||||
|
||||
/**
|
||||
* Execute a SQL query using a string
|
||||
*/
|
||||
<T = any>(string: string): SQL.Query<T>;
|
||||
|
||||
/**
|
||||
* Helper function for inserting an object into a query
|
||||
@@ -1428,16 +1611,19 @@ declare module "bun" {
|
||||
* @example
|
||||
* ```ts
|
||||
* // Insert an object
|
||||
* const result = await sql`insert into users ${sql(users)} RETURNING *`;
|
||||
* const result = await sql`insert into users ${sql(users)} returning *`;
|
||||
*
|
||||
* // Or pick specific columns
|
||||
* const result = await sql`insert into users ${sql(users, "id", "name")} RETURNING *`;
|
||||
* const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
|
||||
*
|
||||
* // Or a single object
|
||||
* const result = await sql`insert into users ${sql(user)} RETURNING *`;
|
||||
* const result = await sql`insert into users ${sql(user)} returning *`;
|
||||
* ```
|
||||
*/
|
||||
<T extends { [Key in PropertyKey]: unknown }>(obj: T | T[] | readonly T[], ...columns: (keyof T)[]): SQLQuery;
|
||||
<T extends { [Key in PropertyKey]: unknown }, Keys extends keyof T = keyof T>(
|
||||
obj: T | T[] | readonly T[],
|
||||
...columns: readonly Keys[]
|
||||
): SQL.Helper<Pick<T, Keys>>;
|
||||
|
||||
/**
|
||||
* Helper function for inserting any serializable value into a query
|
||||
@@ -1447,7 +1633,7 @@ declare module "bun" {
|
||||
* const result = await sql`SELECT * FROM users WHERE id IN ${sql([1, 2, 3])}`;
|
||||
* ```
|
||||
*/
|
||||
(obj: unknown): SQLQuery;
|
||||
<T>(value: T): SQL.Helper<T>;
|
||||
|
||||
/**
|
||||
* Commits a distributed transaction also know as prepared transaction in postgres or XA transaction in MySQL
|
||||
@@ -1519,6 +1705,7 @@ declare module "bun" {

  /**
   * The reserve method pulls out a connection from the pool, and returns a client that wraps the single connection.
   *
   * This can be used for running queries on an isolated connection.
   * Calling reserve in a reserved Sql will return a new reserved connection, not the same connection (behavior matches postgres package).
   *

@@ -1544,7 +1731,10 @@ declare module "bun" {
   * ```
   */
  reserve(): Promise<ReservedSQL>;
  /** Begins a new transaction

  /**
   * Begins a new transaction.
   *
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @example

@@ -1568,8 +1758,11 @@ declare module "bun" {
   * return [user, account]
   * })
   */
  begin(fn: SQLTransactionContextCallback): Promise<any>;
  /** Begins a new transaction with options
  begin<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;

  /**
   * Begins a new transaction with options.
   *
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @example

@@ -1593,8 +1786,11 @@ declare module "bun" {
   * return [user, account]
   * })
   */
  begin(options: string, fn: SQLTransactionContextCallback): Promise<any>;
  /** Alternative method to begin a transaction
  begin<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;

  /**
   * Alternative method to begin a transaction.
   *
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @alias begin
@@ -1619,11 +1815,15 @@
   * return [user, account]
   * })
   */
  transaction(fn: SQLTransactionContextCallback): Promise<any>;
  /** Alternative method to begin a transaction with options
  transaction<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;

  /**
   * Alternative method to begin a transaction with options
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @alias begin
   *
   * @alias {@link begin}
   *
   * @example
   * const [user, account] = await sql.transaction("read write", async sql => {
   * const [user] = await sql`

@@ -1643,15 +1843,18 @@ declare module "bun" {
   * returning *
   * `
   * return [user, account]
   * })
   * });
   */
  transaction(options: string, fn: SQLTransactionContextCallback): Promise<any>;
  /** Begins a distributed transaction
  transaction<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;

  /**
   * Begins a distributed transaction
   * Also known as Two-Phase Commit, in a distributed transaction, Phase 1 involves the coordinator preparing nodes by ensuring data is written and ready to commit, while Phase 2 finalizes with nodes committing or rolling back based on the coordinator's decision, ensuring durability and releasing locks.
   * In PostgreSQL and MySQL distributed transactions persist beyond the original session, allowing privileged users or coordinators to commit/rollback them, ensuring support for distributed transactions, recovery, and administrative tasks.
   * beginDistributed will automatically roll back if any exceptions are not caught, and you can commit and rollback later if everything goes well.
   * PostgreSQL natively supports distributed transactions using PREPARE TRANSACTION, while MySQL uses XA Transactions, and MSSQL also supports distributed/XA transactions. However, in MSSQL, distributed transactions are tied to the original session, the DTC coordinator, and the specific connection.
   * These transactions are automatically committed or rolled back following the same rules as regular transactions, with no option for manual intervention from other sessions; in MSSQL, distributed transactions are used to coordinate transactions using Linked Servers.
   *
   * @example
   * await sql.beginDistributed("numbers", async sql => {
   * await sql`create table if not exists numbers (a int)`;
@@ -1661,31 +1864,38 @@ declare module "bun" {
|
||||
* await sql.commitDistributed("numbers");
|
||||
* // or await sql.rollbackDistributed("numbers");
|
||||
*/
|
||||
beginDistributed(name: string, fn: SQLTransactionContextCallback): Promise<any>;
|
||||
beginDistributed<const T>(
|
||||
name: string,
|
||||
fn: SQL.TransactionContextCallback<T>,
|
||||
): Promise<SQL.ContextCallbackResult<T>>;
|
||||
|
||||
/** Alternative method to begin a distributed transaction
|
||||
* @alias beginDistributed
|
||||
* @alias {@link beginDistributed}
|
||||
*/
|
||||
distributed(name: string, fn: SQLTransactionContextCallback): Promise<any>;
|
||||
distributed<const T>(name: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
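A sketch of the two-phase flow described above, following the JSDoc example; `"my_tx"` is an arbitrary transaction name that a coordinator (possibly another session) later resolves:

```ts
import { sql } from "bun";

// Phase 1: do the work and leave it prepared under the given name.
await sql.beginDistributed("my_tx", async tx => {
  await tx`insert into numbers values (1)`;
});

// Phase 2: the coordinator decides the outcome.
await sql.commitDistributed("my_tx");
// ...or: await sql.rollbackDistributed("my_tx");
```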

/** If you know what you're doing, you can use unsafe to pass any string you'd like.
* Please note that this can lead to SQL injection if you're not careful.
* You can also nest sql.unsafe within a safe sql expression. This is useful if only part of your query has unsafe elements.
* @example
* const result = await sql.unsafe(`select ${danger} from users where id = ${dragons}`)
*/
unsafe(string: string, values?: any[]): SQLQuery;
unsafe<T = any>(string: string, values?: any[]): SQL.Query<T>;
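The nesting mentioned in the comment looks roughly like this; `order` is a hypothetical column name assumed to be validated elsewhere, while `minAge` stays a bound parameter:

```ts
import { sql } from "bun";

const order = "created_at"; // assumed validated against an allowlist
const minAge = 21;

// Only the ORDER BY fragment bypasses escaping; ${minAge} is still parameterized.
const rows = await sql`
  select * from users
  where age >= ${minAge}
  order by ${sql.unsafe(order)}
`;
```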

/**
* Reads a file and uses the contents as a query.
* Optional parameters can be used if the file includes $1, $2, etc.
* @example
* const result = await sql.file("query.sql", [1, 2, 3]);
*/
file(filename: string, values?: any[]): SQLQuery;
file<T = any>(filename: string, values?: any[]): SQL.Query<T>;

/** Current client options */
options: SQLOptions;

[Symbol.asyncDispose](): Promise<any>;
/**
* Current client options
*/
options: SQL.Options;
}

const SQL: {
/**
* Creates a new SQL client instance
@@ -1711,7 +1921,7 @@ declare module "bun" {
* const sql = new SQL("postgres://localhost:5432/mydb", { idleTimeout: 1000 });
* ```
*/
new (connectionString: string | URL, options: Omit<SQLOptions, "url">): SQL;
new (connectionString: string | URL, options: Omit<SQL.Options, "url">): SQL;

/**
* Creates a new SQL client instance with options
@@ -1723,17 +1933,18 @@ declare module "bun" {
* const sql = new SQL({ url: "postgres://localhost:5432/mydb", idleTimeout: 1000 });
* ```
*/
new (options?: SQLOptions): SQL;
new (options?: SQL.Options): SQL;
};

/**
* Represents a reserved connection from the connection pool
* Extends SQL with additional release functionality
*/
interface ReservedSQL extends SQL {
/** Releases the client back to the connection pool */
interface ReservedSQL extends SQL, Disposable {
/**
* Releases the client back to the connection pool
*/
release(): void;
[Symbol.dispose](): void;
}
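Because ReservedSQL is now Disposable, a reserved connection can be released automatically with `using`; a minimal sketch, assuming `sql.reserve()` as in Bun's SQL API:

```ts
import { sql } from "bun";

{
  // [Symbol.dispose]() runs at block exit, releasing the connection
  // back to the pool even if an error is thrown inside the block.
  using reserved = await sql.reserve();
  await reserved`select 1`;
} // released here
```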

/**
@@ -1742,26 +1953,30 @@ declare module "bun" {
*/
interface TransactionSQL extends SQL {
/** Creates a savepoint within the current transaction */
savepoint(name: string, fn: SQLSavepointContextCallback): Promise<any>;
savepoint(fn: SQLSavepointContextCallback): Promise<any>;
savepoint<T>(name: string, fn: SQLSavepointContextCallback<T>): Promise<T>;
savepoint<T>(fn: SQLSavepointContextCallback<T>): Promise<T>;
}
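The savepoint overloads are used inside a transaction callback; a sketch with a hypothetical `users` table:

```ts
import { sql } from "bun";

await sql.transaction(async tx => {
  await tx`insert into users (name) values ('alice')`;

  // If the savepoint callback throws, only this inner work is rolled
  // back (ROLLBACK TO SAVEPOINT); the outer transaction may continue.
  await tx.savepoint("bulk", async sp => {
    await sp`insert into users (name) values ('bob')`;
  });
});
```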

/**
* Represents a savepoint within a transaction
*/
interface SavepointSQL extends SQL {}

type CSRFAlgorithm = "blake2b256" | "blake2b512" | "sha256" | "sha384" | "sha512" | "sha512-256";

interface CSRFGenerateOptions {
/**
* The number of milliseconds until the token expires. 0 means the token never expires.
* @default 24 * 60 * 60 * 1000 (24 hours)
*/
expiresIn?: number;

/**
* The encoding of the token.
* @default "base64url"
*/
encoding?: "base64" | "base64url" | "hex";

/**
* The algorithm to use for the token.
* @default "sha256"
@@ -1774,16 +1989,19 @@ declare module "bun" {
* The secret to use for the token. If not provided, a random default secret will be generated in memory and used.
*/
secret?: string;

/**
* The encoding of the token.
* @default "base64url"
*/
encoding?: "base64" | "base64url" | "hex";

/**
* The algorithm to use for the token.
* @default "sha256"
*/
algorithm?: CSRFAlgorithm;

/**
* The number of milliseconds until the token expires. 0 means the token never expires.
* @default 24 * 60 * 60 * 1000 (24 hours)
@@ -1793,15 +2011,11 @@ declare module "bun" {

/**
* SQL client
*
* @category Database
*/
const sql: SQL;

/**
* SQL client for PostgreSQL
*
* @category Database
*/
const postgres: SQL;

@@ -3296,7 +3510,7 @@ declare module "bun" {
[K in HTTPMethod]?: RouteHandlerWithWebSocketUpgrade<T>;
};

type RouteValue<T extends string> = Response | false | RouteHandler<T> | RouteHandlerObject<T>;
type RouteValue<T extends string> = Response | false | RouteHandler<T> | RouteHandlerObject<T> | HTMLBundle;
type RouteValueWithWebSocketUpgrade<T extends string> =
| RouteValue<T>
| RouteHandlerWithWebSocketUpgrade<T>
@@ -5859,7 +6073,7 @@ declare module "bun" {
const isMainThread: boolean;

/**
* Used when importing an HTML file at runtime.
* Used when importing an HTML file at runtime or at build time.
*
* @example
*
@@ -5867,10 +6081,34 @@ declare module "bun" {
* import app from "./index.html";
* ```
*
* Bun.build support for this isn't implemented yet.
*/

interface HTMLBundle {
index: string;

/** Array of generated output files with metadata. This only exists when built ahead of time with `Bun.build` or `bun build` */
files?: Array<{
/** Original source file path. */
input?: string;
/** Generated output file path (with content hash, if included in naming) */
path: string;
/** File type/loader used (js, css, html, file, etc.) */
loader: Loader;
/** Whether this file is an entry point */
isEntry: boolean;
/** HTTP headers including ETag and Content-Type */
headers: {
/** ETag for caching */
etag: string;
/** MIME type with charset */
"content-type": string;

/**
* Additional headers may be added in the future.
*/
[key: string]: string;
};
}>;
}
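Tying HTMLBundle back to the RouteValue change earlier in this diff: an imported HTML file can be passed directly as a route value. A minimal sketch, assuming the `routes` option of Bun.serve:

```ts
import app from "./index.html"; // typed as HTMLBundle (see extensions.d.ts below)

Bun.serve({
  routes: {
    // An HTMLBundle is now a valid RouteValue; Bun serves the bundled
    // assets, using the headers metadata when built ahead of time.
    "/": app,
  },
  fetch() {
    return new Response("Not Found", { status: 404 });
  },
});
```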

/**
@@ -6965,8 +7203,6 @@ declare module "bun" {
maxBuffer?: number;
}

type ReadableIO = ReadableStream<Uint8Array> | number | undefined;

type ReadableToIO<X extends Readable> = X extends "pipe" | undefined
? ReadableStream<Uint8Array>
: X extends BunFile | ArrayBufferView | number
@@ -7681,6 +7917,56 @@ declare module "bun" {
timestamp?: number | Date,
): Buffer;

/**
* Generate a UUIDv5, which is a name-based UUID based on the SHA-1 hash of a namespace UUID and a name.
*
* @param name The name to use for the UUID
* @param namespace The namespace to use for the UUID
* @param encoding The encoding to use for the UUID
*
* @example
* ```js
* import { randomUUIDv5 } from "bun";
* const uuid = randomUUIDv5("www.example.com", "dns");
* console.log(uuid); // "6ba7b810-9dad-11d1-80b4-00c04fd430c8"
* ```
*
* ```js
* import { randomUUIDv5 } from "bun";
* const uuid = randomUUIDv5("www.example.com", "url");
* console.log(uuid); // "6ba7b811-9dad-11d1-80b4-00c04fd430c8"
* ```
*/
function randomUUIDv5(
name: string | BufferSource,
namespace: string | BufferSource | "dns" | "url" | "oid" | "x500",
/**
* @default "hex"
*/
encoding?: "hex" | "base64" | "base64url",
): string;

/**
* Generate a UUIDv5 as a Buffer
*
* @param name The name to use for the UUID
* @param namespace The namespace to use for the UUID
* @param encoding The encoding to use for the UUID
*
* @example
* ```js
* import { randomUUIDv5 } from "bun";
* const uuid = randomUUIDv5("www.example.com", "url", "buffer");
* console.log(uuid); // <Buffer 6b a7 b8 11 9d ad 11 d1 80 b4 00 c0 4f d4 30 c8>
* ```
*/
function randomUUIDv5(
name: string | BufferSource,
namespace: string | BufferSource | "dns" | "url" | "oid" | "x500",
encoding: "buffer",
): Buffer;
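Unlike the random v4/v7 generators, these v5 overloads are deterministic: the same name and namespace always yield the same UUID. A short sketch:

```ts
import { randomUUIDv5 } from "bun";

const a = randomUUIDv5("www.example.com", "dns");
const b = randomUUIDv5("www.example.com", "dns");
console.log(a === b); // true - name-based UUIDs are stable

// The "buffer" overload returns the 16 raw bytes instead of a string.
const buf = randomUUIDv5("www.example.com", "dns", "buffer");
console.log(buf.length); // 16
```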

/**
* Types for `bun.lock`
*/

packages/bun-types/deprecated.d.ts
@@ -14,10 +14,23 @@ declare module "bun" {
): void;
}

/** @deprecated Use {@link SQL.Query Bun.SQL.Query} */
type SQLQuery<T = any> = SQL.Query<T>;

/** @deprecated Use {@link SQL.TransactionContextCallback Bun.SQL.TransactionContextCallback} */
type SQLTransactionContextCallback<T> = SQL.TransactionContextCallback<T>;

/** @deprecated Use {@link SQL.SavepointContextCallback Bun.SQL.SavepointContextCallback} */
type SQLSavepointContextCallback<T> = SQL.SavepointContextCallback<T>;

/** @deprecated Use {@link SQL.Options Bun.SQL.Options} */
type SQLOptions = SQL.Options;

/**
* @deprecated Renamed to `ErrorLike`
*/
type Errorlike = ErrorLike;

interface TLSOptions {
/**
* File path to a TLS key
@@ -27,6 +40,7 @@ declare module "bun" {
* @deprecated since v0.6.3 - Use `key: Bun.file(path)` instead.
*/
keyFile?: string;

/**
* File path to a TLS certificate
*
@@ -35,6 +49,7 @@ declare module "bun" {
* @deprecated since v0.6.3 - Use `cert: Bun.file(path)` instead.
*/
certFile?: string;

/**
* File path to a .pem file for a custom root CA
*
@@ -42,6 +57,9 @@ declare module "bun" {
*/
caFile?: string;
}

/** @deprecated This type is unused in Bun's declarations and may be removed in the future */
type ReadableIO = ReadableStream<Uint8Array> | number | undefined;
}

declare namespace NodeJS {

packages/bun-types/experimental.d.ts (new file)
@@ -0,0 +1,276 @@
declare module "bun" {
export namespace __experimental {
/**
* Base interface for static site generation route parameters.
*
* Supports both single string values and arrays of strings for dynamic route segments.
* This is typically used for route parameters like `[slug]`, `[...rest]`, or `[id]`.
*
* @warning These APIs are experimental and might be moved/changed in future releases.
*
* @example
* ```tsx
* // Simple slug parameter
* type BlogParams = { slug: string };
*
* // Multiple parameters
* type ProductParams = {
* category: string;
* id: string;
* };
*
* // Catch-all routes with string arrays
* type DocsParams = {
* path: string[];
* };
* ```
*/
export interface SSGParamsLike {
[key: string]: string | string[];
}

/**
* Configuration object for a single static route to be generated.
*
* Each path object contains the parameters needed to render a specific
* instance of a dynamic route at build time.
*
* @warning These APIs are experimental and might be moved/changed in future releases.
*
* @template Params - The shape of route parameters for this path
*
* @example
* ```tsx
* // Single blog post path
* const blogPath: SSGPath<{ slug: string }> = {
* params: { slug: "my-first-post" }
* };
*
* // Product page with multiple params
* const productPath: SSGPath<{ category: string; id: string }> = {
* params: {
* category: "electronics",
* id: "laptop-123"
* }
* };
*
* // Documentation with catch-all route
* const docsPath: SSGPath<{ path: string[] }> = {
* params: { path: ["getting-started", "installation"] }
* };
* ```
*/
export interface SSGPath<Params extends SSGParamsLike = SSGParamsLike> {
params: Params;
}

/**
* Array of static paths to be generated at build time.
*
* This type represents the collection of all route configurations
* that should be pre-rendered for a dynamic route.
*
* @warning These APIs are experimental and might be moved/changed in future releases.
*
* @template Params - The shape of route parameters for these paths
*
* @example
* ```tsx
* // Array of blog post paths
* const blogPaths: SSGPaths<{ slug: string }> = [
* { params: { slug: "introduction-to-bun" } },
* { params: { slug: "performance-benchmarks" } },
* { params: { slug: "getting-started-guide" } }
* ];
*
* // Mixed parameter types
* const productPaths: SSGPaths<{ category: string; id: string }> = [
* { params: { category: "books", id: "javascript-guide" } },
* { params: { category: "electronics", id: "smartphone-x" } }
* ];
* ```
*/
export type SSGPaths<Params extends SSGParamsLike = SSGParamsLike> = SSGPath<Params>[];

/**
* Props interface for SSG page components.
*
* This interface defines the shape of props that will be passed to your
* static page components during the build process. The `params` object
* contains the route parameters extracted from the URL pattern.
*
* @warning These APIs are experimental and might be moved/changed in future releases.
*
* @template Params - The shape of route parameters for this page
*
* @example
* ```tsx
* // Blog post component props
* interface BlogPageProps extends SSGPageProps<{ slug: string }> {
* // params: { slug: string } is automatically included
* }
*
* // Product page component props
* interface ProductPageProps extends SSGPageProps<{
* category: string;
* id: string;
* }> {
* // params: { category: string; id: string } is automatically included
* }
*
* // Usage in component
* function BlogPost({ params }: BlogPageProps) {
* const { slug } = params; // TypeScript knows slug is a string
* return <h1>Blog post: {slug}</h1>;
* }
* ```
*/
export interface SSGPageProps<Params extends SSGParamsLike = SSGParamsLike> {
params: Params;
}

/**
* React component type for SSG pages that can be statically generated.
*
* This type represents a React component that receives SSG page props
* and can be rendered at build time. The component can be either a regular
* React component or an async React Server Component for advanced use cases
* like data fetching during static generation.
*
* @warning These APIs are experimental and might be moved/changed in future releases.
*
* @template Params - The shape of route parameters for this page component
*
* @example
* ```tsx
* // Regular synchronous SSG page component
* const BlogPost: SSGPage<{ slug: string }> = ({ params }) => {
* return (
* <article>
* <h1>Blog Post: {params.slug}</h1>
* <p>This content was generated at build time!</p>
* </article>
* );
* };
*
* // Async React Server Component for data fetching
* const AsyncBlogPost: SSGPage<{ slug: string }> = async ({ params }) => {
* // Fetch data during static generation
* const post = await fetchBlogPost(params.slug);
* const author = await fetchAuthor(post.authorId);
*
* return (
* <article>
* <h1>{post.title}</h1>
* <p>By {author.name}</p>
* <div dangerouslySetInnerHTML={{ __html: post.content }} />
* </article>
* );
* };
*
* // Product page with multiple params and async data fetching
* const ProductPage: SSGPage<{ category: string; id: string }> = async ({ params }) => {
* const [product, reviews] = await Promise.all([
* fetchProduct(params.category, params.id),
* fetchProductReviews(params.id)
* ]);
*
* return (
* <div>
* <h1>{product.name}</h1>
* <p>Category: {params.category}</p>
* <p>Price: ${product.price}</p>
* <div>
* <h2>Reviews ({reviews.length})</h2>
* {reviews.map(review => (
* <div key={review.id}>{review.comment}</div>
* ))}
* </div>
* </div>
* );
* };
* ```
*/
export type SSGPage<Params extends SSGParamsLike = SSGParamsLike> = React.ComponentType<SSGPageProps<Params>>;

/**
* getStaticPaths is Bun's implementation of SSG (Static Site Generation) path determination.
*
* This function is called at your app's build time to determine which
* dynamic routes should be pre-rendered as static pages. It returns an
* array of path parameters that will be used to generate static pages for
* dynamic routes (e.g., [slug].tsx, [category]/[id].tsx).
*
* The function can be either synchronous or asynchronous, allowing you to
* fetch data from APIs, databases, or file systems to determine which paths
* should be statically generated.
*
* @warning These APIs are experimental and might be moved/changed in future releases.
*
* @template Params - The shape of route parameters for the dynamic route
*
* @returns An object containing an array of paths to be statically generated
*
* @example
* ```tsx
* // In pages/blog/[slug].tsx
* export const getStaticPaths: GetStaticPaths<{ slug: string }> = async () => {
* // Fetch all blog posts from your CMS or API at build time
* const posts = await fetchBlogPosts();
*
* return {
* paths: posts.map((post) => ({
* params: { slug: post.slug }
* }))
* };
* };
*
* // In pages/products/[category]/[id].tsx
* export const getStaticPaths: GetStaticPaths<{
* category: string;
* id: string;
* }> = async () => {
* // Fetch products from database
* const products = await db.products.findMany({
* select: { id: true, category: { slug: true } }
* });
*
* return {
* paths: products.map(product => ({
* params: {
* category: product.category.slug,
* id: product.id
* }
* }))
* };
* };
*
* // In pages/docs/[...path].tsx (catch-all route)
* export const getStaticPaths: GetStaticPaths<{ path: string[] }> = async () => {
* // Read documentation structure from file system
* const docPaths = await getDocumentationPaths('./content/docs');
*
* return {
* paths: docPaths.map(docPath => ({
* params: { path: docPath.split('/') }
* }))
* };
* };
*
* // Synchronous example with static data
* export const getStaticPaths: GetStaticPaths<{ id: string }> = () => {
* const staticIds = ['1', '2', '3', '4', '5'];
*
* return {
* paths: staticIds.map(id => ({
* params: { id }
* }))
* };
* };
* ```
*/
export type GetStaticPaths<Params extends SSGParamsLike = SSGParamsLike> = () => MaybePromise<{
paths: SSGPaths<Params>;
}>;
}
}
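Putting the experimental types together, a dynamic-route file would typically export both a getStaticPaths and a page component. A sketch under the `__experimental` namespace above; `pages/blog/[slug].ts` and the slug list are hypothetical, and `createElement` is used instead of JSX to keep it plain TypeScript:

```ts
// pages/blog/[slug].ts (hypothetical route file)
import { createElement } from "react";
import type { __experimental } from "bun";

type Params = { slug: string };

export const getStaticPaths: __experimental.GetStaticPaths<Params> = async () => {
  const slugs = ["hello-world", "second-post"]; // stand-in for a CMS fetch
  return { paths: slugs.map(slug => ({ params: { slug } })) };
};

// Rendered once per path at build time.
const Page: __experimental.SSGPage<Params> = ({ params }) =>
  createElement("h1", null, `Blog post: ${params.slug}`);

export default Page;
```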
packages/bun-types/extensions.d.ts
@@ -19,7 +19,7 @@ declare module "*/bun.lock" {
}

declare module "*.html" {
// In Bun v1.2, this might change to Bun.HTMLBundle
var contents: any;
var contents: import("bun").HTMLBundle;

export = contents;
}
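With this change an HTML import is no longer `any`; a sketch of what now type-checks:

```ts
import type { HTMLBundle } from "bun";
import app from "./index.html";

// `app` is an HTMLBundle rather than `any`, so this assignment is checked.
const bundle: HTMLBundle = app;
console.log(bundle.index); // path of the entry HTML
```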

packages/bun-types/index.d.ts
@@ -20,6 +20,7 @@
/// <reference path="./deprecated.d.ts" />
/// <reference path="./redis.d.ts" />
/// <reference path="./shell.d.ts" />
/// <reference path="./experimental.d.ts" />

/// <reference path="./bun.ns.d.ts" />

packages/bun-types/overrides.d.ts
@@ -1,5 +1,26 @@
export {};

declare module "stream/web" {
interface ReadableStream {
/**
* Consume a ReadableStream as text
*/
text(): Promise<string>;
/**
* Consume a ReadableStream as a Uint8Array
*/
bytes(): Promise<Uint8Array>;
/**
* Consume a ReadableStream as JSON
*/
json(): Promise<any>;
/**
* Consume a ReadableStream as a Blob
*/
blob(): Promise<Blob>;
}
}
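These overrides mirror the convenience methods Bun adds to ReadableStream; a sketch of consuming a node:stream/web stream directly:

```ts
import { ReadableStream } from "stream/web";

const stream = new ReadableStream({
  start(controller) {
    controller.enqueue(new TextEncoder().encode('{"ok":true}'));
    controller.close();
  },
});

// With the augmentation above, the stream can be consumed in one call:
const data = await stream.json(); // { ok: true }
console.log(data.ok);
```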

declare global {
namespace NodeJS {
interface ProcessEnv extends Bun.Env, ImportMetaEnv {}

@@ -19,7 +19,11 @@
"dependencies": {
"@types/node": "*"
},
"peerDependencies": {
"@types/react": "^19"
},
"devDependencies": {
"@types/react": "^19",
"typescript": "^5.0.2"
},
"scripts": {

packages/bun-types/test.d.ts
@@ -1184,14 +1184,6 @@ declare module "bun:test" {
* expect(null).toBeInstanceOf(Array); // fail
*/
toBeInstanceOf(value: unknown): void;
/**
* Asserts that the expected value is an instance of value
*
* @example
* expect([]).toBeInstanceOf(Array);
* expect(null).toBeInstanceOf(Array); // fail
*/
toBeInstanceOf(value: unknown): void;
/**
* Asserts that a value is `undefined`.
*

@@ -95,8 +95,7 @@ WIN32_EXPORT struct us_socket_context_t *us_create_child_socket_context(int ssl,

```c
/* Write up to length bytes of data. Returns actual bytes written. Will call the on_writable callback of active socket context on failure to write everything off in one go.
* Set hint msg_more if you have more immediate data to write. */
WIN32_EXPORT int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more);
WIN32_EXPORT int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length);

/* Set a low precision, high performance timer on a socket. A socket can only have one single active timer at any given point in time. Will remove any such pre set timer */
WIN32_EXPORT void us_socket_timeout(int ssl, struct us_socket_t *s, unsigned int seconds);

@@ -762,9 +762,9 @@ ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_le
}
#else
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
ssize_t written = bsd_send(fd, header, header_length, 0);
ssize_t written = bsd_send(fd, header, header_length);
if (written == header_length) {
ssize_t second_write = bsd_send(fd, payload, payload_length, 0);
ssize_t second_write = bsd_send(fd, payload, payload_length);
if (second_write > 0) {
written += second_write;
}
@@ -773,7 +773,7 @@ ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_le
}
#endif

ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length) {
while (1) {
// MSG_MORE (Linux), MSG_PARTIAL (Windows), TCP_NOPUSH (BSD)

@@ -781,13 +781,8 @@ ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int ms
#define MSG_NOSIGNAL 0
#endif

#ifdef MSG_MORE
// for Linux we do not want signals
ssize_t rc = send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
#else
// use TCP_NOPUSH
ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);
#endif
// use TCP_NOPUSH
ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);

if (UNLIKELY(IS_EINTR(rc))) {
continue;

@@ -23,7 +23,6 @@
#ifndef _WIN32
#include <arpa/inet.h>
#endif

#define CONCURRENT_CONNECTIONS 4

// clang-format off
@@ -43,17 +42,20 @@ int us_raw_root_certs(struct us_cert_string_t**out){

void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) {
/* us_listen_socket_t extends us_socket_t so we close in similar ways */
if (!us_socket_is_closed(0, &ls->s)) {
us_internal_socket_context_unlink_listen_socket(ssl, ls->s.context, ls);
us_poll_stop((struct us_poll_t *) &ls->s, ls->s.context->loop);
bsd_close_socket(us_poll_fd((struct us_poll_t *) &ls->s));
struct us_socket_t* s = &ls->s;
if (!us_socket_is_closed(0, s)) {
struct us_socket_context_t* context = s->context;
struct us_loop_t* loop = context->loop;
us_internal_socket_context_unlink_listen_socket(ssl, context, ls);
us_poll_stop((struct us_poll_t *) s, loop);
bsd_close_socket(us_poll_fd((struct us_poll_t *) s));

/* Link this socket to the close-list and let it be deleted after this iteration */
ls->s.next = ls->s.context->loop->data.closed_head;
ls->s.context->loop->data.closed_head = &ls->s;
s->next = loop->data.closed_head;
loop->data.closed_head = s;

/* Any socket with prev = context is marked as closed */
ls->s.prev = (struct us_socket_t *) ls->s.context;
s->prev = (struct us_socket_t *) context;
}

/* We cannot immediately free a listen socket as we can be inside an accept loop */
@@ -91,16 +93,18 @@ void us_internal_socket_context_unlink_listen_socket(int ssl, struct us_socket_c
context->iterator = ls->s.next;
}

if (ls->s.prev == ls->s.next) {
struct us_socket_t* prev = ls->s.prev;
struct us_socket_t* next = ls->s.next;
if (prev == next) {
context->head_listen_sockets = 0;
} else {
if (ls->s.prev) {
ls->s.prev->next = ls->s.next;
if (prev) {
prev->next = next;
} else {
context->head_listen_sockets = (struct us_listen_socket_t *) ls->s.next;
context->head_listen_sockets = (struct us_listen_socket_t *) next;
}
if (ls->s.next) {
ls->s.next->prev = ls->s.prev;
if (next) {
next->prev = prev;
}
}
us_socket_context_unref(ssl, context);
@@ -112,31 +116,35 @@ void us_internal_socket_context_unlink_socket(int ssl, struct us_socket_context_
context->iterator = s->next;
}

if (s->prev == s->next) {
struct us_socket_t* prev = s->prev;
struct us_socket_t* next = s->next;
if (prev == next) {
context->head_sockets = 0;
} else {
if (s->prev) {
s->prev->next = s->next;
if (prev) {
prev->next = next;
} else {
context->head_sockets = s->next;
context->head_sockets = next;
}
if (s->next) {
s->next->prev = s->prev;
if (next) {
next->prev = prev;
}
}
us_socket_context_unref(ssl, context);
}
void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
if (c->prev_pending == c->next_pending) {
struct us_connecting_socket_t* prev = c->prev_pending;
struct us_connecting_socket_t* next = c->next_pending;
if (prev == next) {
context->head_connecting_sockets = 0;
} else {
if (c->prev_pending) {
c->prev_pending->next_pending = c->next_pending;
if (prev) {
prev->next_pending = next;
} else {
context->head_connecting_sockets = c->next_pending;
context->head_connecting_sockets = next;
}
if (c->next_pending) {
c->next_pending->prev_pending = c->prev_pending;
if (next) {
next->prev_pending = prev;
}
}
us_socket_context_unref(ssl, context);
@@ -144,11 +152,12 @@ void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_sock

/* We always add in the top, so we don't modify any s.next */
void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
ls->s.context = context;
ls->s.next = (struct us_socket_t *) context->head_listen_sockets;
ls->s.prev = 0;
struct us_socket_t* s = &ls->s;
s->context = context;
s->next = (struct us_socket_t *) context->head_listen_sockets;
s->prev = 0;
if (context->head_listen_sockets) {
context->head_listen_sockets->s.prev = &ls->s;
context->head_listen_sockets->s.prev = s;
}
context->head_listen_sockets = ls;
us_socket_context_ref(0, context);
@@ -366,15 +375,15 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
us_poll_start(p, context->loop, LIBUS_SOCKET_READABLE);

struct us_listen_socket_t *ls = (struct us_listen_socket_t *) p;

ls->s.context = context;
ls->s.timeout = 255;
ls->s.long_timeout = 255;
ls->s.flags.low_prio_state = 0;
ls->s.flags.is_paused = 0;
ls->s.flags.is_ipc = 0;
ls->s.next = 0;
ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
struct us_socket_t* s = &ls->s;
s->context = context;
s->timeout = 255;
s->long_timeout = 255;
s->flags.low_prio_state = 0;
s->flags.is_paused = 0;
s->flags.is_ipc = 0;
s->next = 0;
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
us_internal_socket_context_link_listen_socket(context, ls);

ls->socket_ext_size = socket_ext_size;
@@ -400,15 +409,16 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
us_poll_start(p, context->loop, LIBUS_SOCKET_READABLE);

struct us_listen_socket_t *ls = (struct us_listen_socket_t *) p;
ls->s.connect_state = NULL;
ls->s.context = context;
ls->s.timeout = 255;
ls->s.long_timeout = 255;
ls->s.flags.low_prio_state = 0;
ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
ls->s.flags.is_paused = 0;
ls->s.flags.is_ipc = 0;
ls->s.next = 0;
struct us_socket_t* s = &ls->s;
s->connect_state = NULL;
s->context = context;
s->timeout = 255;
s->long_timeout = 255;
s->flags.low_prio_state = 0;
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
s->flags.is_paused = 0;
s->flags.is_ipc = 0;
s->next = 0;
us_internal_socket_context_link_listen_socket(context, ls);

ls->socket_ext_size = socket_ext_size;
@@ -515,9 +525,10 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
}

// if there is only one result we can immediately connect
if (result->entries && result->entries->info.ai_next == NULL) {
struct addrinfo_result_entry* entries = result->entries;
if (entries && entries->info.ai_next == NULL) {
struct sockaddr_storage addr;
init_addr_with_port(&result->entries->info, port, &addr);
init_addr_with_port(&entries->info, port, &addr);
*has_dns_resolved = 1;
struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
Bun__addrinfo_freeRequest(ai_req, s == NULL);
@@ -557,17 +568,19 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
}
++opened;
bsd_socket_nodelay(connect_socket_fd, 1);

struct us_socket_t *s = (struct us_socket_t *)us_create_poll(c->context->loop, 0, sizeof(struct us_socket_t) + c->socket_ext_size);
s->context = c->context;
struct us_loop_t* loop = c->context->loop;
struct us_socket_context_t* context = c->context;
struct us_socket_t *s = (struct us_socket_t *)us_create_poll(loop, 0, sizeof(struct us_socket_t) + c->socket_ext_size);
s->context = context;
s->timeout = c->timeout;
s->long_timeout = c->long_timeout;
s->flags.low_prio_state = 0;
s->flags.allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
s->flags.is_paused = 0;
s->flags.is_ipc = 0;
struct us_socket_flags* flags = &s->flags;
flags->low_prio_state = 0;
flags->allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
flags->is_paused = 0;
flags->is_ipc = 0;
/* Link it into context so that timeout fires properly */
us_internal_socket_context_link_socket(s->context, s);
us_internal_socket_context_link_socket(context, s);

// TODO check this, specifically how it interacts with the SSL code
// does this work when we create multiple sockets at once? will we need multiple SSL contexts?
@@ -579,10 +592,10 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
c->connecting_head = s;

s->connect_state = c;

struct us_poll_t* poll = &s->p;
/* Connect sockets are semi-sockets just like listen sockets */
us_poll_init(&s->p, connect_socket_fd, POLL_TYPE_SEMI_SOCKET);
us_poll_start(&s->p, s->context->loop, LIBUS_SOCKET_WRITABLE);
us_poll_init(poll, connect_socket_fd, POLL_TYPE_SEMI_SOCKET);
us_poll_start(poll, loop, LIBUS_SOCKET_WRITABLE);
}
return opened;
}
@@ -774,42 +787,50 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
if (us_socket_is_closed(ssl, s) || us_socket_is_shut_down(ssl, s)) {
return s;
}

struct us_socket_context_t *old_context = s->context;
struct us_loop_t *loop = old_context->loop;
/* We need to be sure that we still holding a reference*/
us_socket_context_ref(ssl, old_context);
if (s->flags.low_prio_state != 1) {
/* We need to be sure that we still holding a reference*/
us_socket_context_ref(ssl, context);
/* This properly updates the iterator if in on_timeout */
us_internal_socket_context_unlink_socket(ssl, s->context, s);
us_internal_socket_context_unlink_socket(ssl, old_context, s);
} else {
/* We manually ref/unref context to handle context life cycle with low-priority queue */
us_socket_context_unref(ssl, old_context);
}

struct us_connecting_socket_t *c = s->connect_state;

struct us_socket_t *new_s = s;

if (ext_size != -1) {
new_s = (struct us_socket_t *) us_poll_resize(&s->p, s->context->loop, sizeof(struct us_socket_t) + ext_size);
struct us_poll_t *pool_ref = &s->p;

new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + ext_size);
if (c) {
c->connecting_head = new_s;
struct us_socket_context_t *old_context = s->context;
c->context = context;
us_internal_socket_context_link_connecting_socket(ssl, context, c);
us_internal_socket_context_unlink_connecting_socket(ssl, old_context, c);
us_internal_socket_context_link_connecting_socket(ssl, context, c);
}
}
new_s->context = context;
new_s->timeout = 255;
new_s->long_timeout = 255;

if (new_s->flags.low_prio_state == 1) {
/* update pointers in low-priority queue */
if (!new_s->prev) new_s->context->loop->data.low_prio_head = new_s;
if (!new_s->prev) loop->data.low_prio_head = new_s;
else new_s->prev->next = new_s;

if (new_s->next) new_s->next->prev = new_s;
/* We manually ref/unref context to handle context life cycle with low-priority queue */
us_socket_context_ref(ssl, context);
} else {
us_internal_socket_context_link_socket(context, new_s);
us_socket_context_unref(ssl, context);
}

/* We can safely unref the old context here with can potentially be freed */
us_socket_context_unref(ssl, old_context);
return new_s;
}

@@ -52,10 +52,6 @@ struct loop_ssl_data {
unsigned int ssl_read_input_offset;

struct us_socket_t *ssl_socket;

int last_write_was_msg_more;
int msg_more;

BIO *shared_rbio;
BIO *shared_wbio;
BIO_METHOD *shared_biom;
@@ -139,10 +135,7 @@ int BIO_s_custom_write(BIO *bio, const char *data, int length) {
struct loop_ssl_data *loop_ssl_data =
(struct loop_ssl_data *)BIO_get_data(bio);

loop_ssl_data->last_write_was_msg_more =
loop_ssl_data->msg_more || length == 16413;
int written = us_socket_write(0, loop_ssl_data->ssl_socket, data, length,
loop_ssl_data->last_write_was_msg_more);
int written = us_socket_write(0, loop_ssl_data->ssl_socket, data, length);

BIO_clear_retry_flags(bio);
if (!written) {
@@ -192,7 +185,6 @@ struct loop_ssl_data * us_internal_set_loop_ssl_data(struct us_internal_ssl_sock
loop_ssl_data->ssl_read_input_length = 0;
loop_ssl_data->ssl_read_input_offset = 0;
loop_ssl_data->ssl_socket = &s->s;
loop_ssl_data->msg_more = 0;
return loop_ssl_data;
}

@@ -665,8 +657,6 @@ void us_internal_init_loop_ssl_data(struct us_loop_t *loop) {
us_calloc(1, sizeof(struct loop_ssl_data));
loop_ssl_data->ssl_read_input_length = 0;
loop_ssl_data->ssl_read_input_offset = 0;
loop_ssl_data->last_write_was_msg_more = 0;
loop_ssl_data->msg_more = 0;

loop_ssl_data->ssl_read_output =
us_malloc(LIBUS_RECV_BUFFER_LENGTH + LIBUS_RECV_BUFFER_PADDING * 2);
@@ -1741,17 +1731,16 @@ us_internal_ssl_socket_get_native_handle(struct us_internal_ssl_socket_t *s) {
}

int us_internal_ssl_socket_raw_write(struct us_internal_ssl_socket_t *s,
const char *data, int length,
int msg_more) {
const char *data, int length) {

if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
return 0;
}
return us_socket_write(0, &s->s, data, length, msg_more);
return us_socket_write(0, &s->s, data, length);
}

int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
const char *data, int length, int msg_more) {
const char *data, int length) {

if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) || length == 0) {
return 0;
@@ -1772,14 +1761,8 @@ int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
loop_ssl_data->ssl_read_input_length = 0;

loop_ssl_data->ssl_socket = &s->s;
loop_ssl_data->msg_more = msg_more;
loop_ssl_data->last_write_was_msg_more = 0;

int written = SSL_write(s->ssl, data, length);
loop_ssl_data->msg_more = 0;
if (loop_ssl_data->last_write_was_msg_more && !msg_more) {
us_socket_flush(0, &s->s);
}

if (written > 0) {
return written;
@@ -1836,7 +1819,6 @@ void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) {
// on_data and checked in the BIO
loop_ssl_data->ssl_socket = &s->s;

loop_ssl_data->msg_more = 0;
// sets SSL_SENT_SHUTDOWN and waits for the other side to do the same
int ret = SSL_shutdown(s->ssl);

@@ -19,7 +19,6 @@
#include "internal/internal.h"
#include <stdlib.h>
#include <time.h>

#if defined(LIBUS_USE_EPOLL) || defined(LIBUS_USE_KQUEUE)

void Bun__internal_dispatch_ready_poll(void* loop, void* poll);
@@ -338,7 +337,7 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop, struct

// if new events does not contain the ready events of this poll then remove (no we filter that out later on)
SET_READY_POLL(loop, i, new_poll);

num_entries_possibly_remaining--;
}
}
@@ -381,19 +380,18 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d

struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, unsigned int ext_size) {
int events = us_poll_events(p);

struct us_poll_t *new_p = us_realloc(p, sizeof(struct us_poll_t) + ext_size);
if (p != new_p && events) {
if (p != new_p) {
#ifdef LIBUS_USE_EPOLL
/* Hack: forcefully update poll by stripping away already set events */
new_p->state.poll_type = us_internal_poll_type(new_p);
us_poll_change(new_p, loop, events);
#else
/* Forcefully update poll by resetting them with new_p as user data */
kqueue_change(loop->fd, new_p->state.fd, 0, events, new_p);
#endif

/* This is needed for epoll also (us_change_poll doesn't update the old poll) */
kqueue_change(loop->fd, new_p->state.fd, 0, LIBUS_SOCKET_WRITABLE | LIBUS_SOCKET_READABLE, new_p);
#endif /* This is needed for epoll also (us_change_poll doesn't update the old poll) */
us_internal_loop_update_pending_ready_polls(loop, p, new_p, events, events);
}

@@ -447,7 +445,7 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
kqueue_change(loop->fd, p->state.fd, old_events, events, p);
#endif
/* Set all removed events to null-polls in pending ready poll list */
//us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events);
// us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events);
}
}

@@ -421,10 +421,9 @@ struct us_socket_t *us_internal_ssl_socket_context_connect_unix(
size_t pathlen, int options, int socket_ext_size);

int us_internal_ssl_socket_write(us_internal_ssl_socket_r s,
const char *data, int length, int msg_more);
const char *data, int length);
int us_internal_ssl_socket_raw_write(us_internal_ssl_socket_r s,
const char *data, int length,
int msg_more);
const char *data, int length);

void us_internal_ssl_socket_timeout(us_internal_ssl_socket_r s,
unsigned int seconds);

@@ -210,7 +210,7 @@ ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags);
#if !defined(_WIN32)
ssize_t bsd_recvmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct msghdr *msg, int flags);
#endif
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more);
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length);
#if !defined(_WIN32)
ssize_t bsd_sendmsg(LIBUS_SOCKET_DESCRIPTOR fd, const struct msghdr *msg, int flags);
#endif

@@ -419,9 +419,8 @@ struct us_poll_t *us_poll_resize(us_poll_r p, us_loop_r loop, unsigned int ext_s
void *us_socket_get_native_handle(int ssl, us_socket_r s) nonnull_fn_decl;

/* Write up to length bytes of data. Returns actual bytes written.
* Will call the on_writable callback of active socket context on failure to write everything off in one go.
* Set hint msg_more if you have more immediate data to write. */
int us_socket_write(int ssl, us_socket_r s, const char * nonnull_arg data, int length, int msg_more) nonnull_fn_decl;
* Will call the on_writable callback of active socket context on failure to write everything off in one go. */
int us_socket_write(int ssl, us_socket_r s, const char * nonnull_arg data, int length) nonnull_fn_decl;

/* Special path for non-SSL sockets. Used to send header and payload in one go. Works like us_socket_write. */
int us_socket_write2(int ssl, us_socket_r s, const char *header, int header_length, const char *payload, int payload_length) nonnull_fn_decl;
@@ -440,7 +439,7 @@ void *us_connecting_socket_ext(int ssl, struct us_connecting_socket_t *c) nonnul
/* Return the socket context of this socket */
struct us_socket_context_t *us_socket_context(int ssl, us_socket_r s) nonnull_fn_decl __attribute__((returns_nonnull));

/* Withdraw any msg_more status and flush any pending data */
/* Flush any pending data */
void us_socket_flush(int ssl, us_socket_r s) nonnull_fn_decl;

/* Shuts down the connection by sending FIN and/or close_notify */
@@ -471,7 +470,7 @@ void us_socket_local_address(int ssl, us_socket_r s, char *nonnull_arg buf, int
struct us_socket_t *us_socket_pair(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR* fds);
struct us_socket_t *us_socket_from_fd(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR fd, int ipc);
struct us_socket_t *us_socket_wrap_with_tls(int ssl, us_socket_r s, struct us_bun_socket_context_options_t options, struct us_socket_events_t events, int socket_ext_size);
int us_socket_raw_write(int ssl, us_socket_r s, const char *data, int length, int msg_more);
int us_socket_raw_write(int ssl, us_socket_r s, const char *data, int length);
struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_client, char* ip, int ip_length);
int us_raw_root_certs(struct us_cert_string_t**out);
unsigned int us_get_remote_address_info(char *buf, us_socket_r s, const char **dest, int *port, int *is_ipv6);

@@ -336,12 +336,13 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
case POLL_TYPE_SOCKET: {
/* We should only use s, no p after this point */
struct us_socket_t *s = (struct us_socket_t *) p;

/* The context can change after calling a callback but the loop is always the same */
struct us_loop_t* loop = s->context->loop;
if (events & LIBUS_SOCKET_WRITABLE && !error) {
/* Note: if we failed a write as a socket of one loop then adopted
* to another loop, this will be wrong. Absurd case though */
s->context->loop->data.last_write_failed = 0;

loop->data.last_write_failed = 0;

s = s->context->on_writable(s);

if (!s || us_socket_is_closed(0, s)) {
@@ -349,8 +350,8 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
}

/* If we have no failed write or if we shut down, then stop polling for more writable */
if (!s->context->loop->data.last_write_failed || us_socket_is_shut_down(0, s)) {
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_READABLE);
if (!loop->data.last_write_failed || us_socket_is_shut_down(0, s)) {
us_poll_change(&s->p, loop, us_poll_events(&s->p) & LIBUS_SOCKET_READABLE);
}
}

@@ -358,25 +359,28 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
/* Contexts may prioritize down sockets that are currently readable, e.g. when SSL handshake has to be done.
* SSL handshakes are CPU intensive, so we limit the number of handshakes per loop iteration, and move the rest
* to the low-priority queue */
if (s->context->is_low_prio(s)) {
if (s->flags.low_prio_state == 2) {
s->flags.low_prio_state = 0; /* Socket has been delayed and now it's time to process incoming data for one iteration */
} else if (s->context->loop->data.low_prio_budget > 0) {
s->context->loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */
struct us_socket_context_t *context = s->context;
struct us_socket_flags* flags = &s->flags;
if (context->is_low_prio(s)) {
if (flags->low_prio_state == 2) {
flags->low_prio_state = 0; /* Socket has been delayed and now it's time to process incoming data for one iteration */
} else if (loop->data.low_prio_budget > 0) {
loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */
} else {
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
us_socket_context_ref(0, s->context);
us_internal_socket_context_unlink_socket(0, s->context, s);
struct us_poll_t* poll = &s->p;
us_poll_change(poll, loop, us_poll_events(poll) & LIBUS_SOCKET_WRITABLE);
us_socket_context_ref(0, context);
us_internal_socket_context_unlink_socket(0, context, s);

/* Link this socket to the low-priority queue - we use a LIFO queue, to prioritize newer clients that are
* maybe not already timeouted - sounds unfair, but works better in real-life with smaller client-timeouts
* under high load */
s->prev = 0;
s->next = s->context->loop->data.low_prio_head;
s->next = loop->data.low_prio_head;
if (s->next) s->next->prev = s;
s->context->loop->data.low_prio_head = s;
loop->data.low_prio_head = s;

s->flags.low_prio_state = 1;
flags->low_prio_state = 1;

break;
}
@@ -385,7 +389,6 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
size_t repeat_recv_count = 0;

do {
const struct us_loop_t* loop = s->context->loop;
#ifdef _WIN32
const int recv_flags = MSG_PUSH_IMMEDIATE;
#else
@@ -478,7 +481,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
}
if(s->flags.allow_half_open) {
/* We got a Error but is EOF and we allow half open so stop polling for readable and keep going*/
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
us_poll_change(&s->p, loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
s = s->context->on_end(s);
} else {
/* We dont allow half open just emit end and close the socket */

@@ -357,17 +357,17 @@ void *us_connecting_socket_get_native_handle(int ssl, struct us_connecting_socke
return (void *) (uintptr_t) -1;
}

int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more) {
int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return us_internal_ssl_socket_write((struct us_internal_ssl_socket_t *) s, data, length, msg_more);
return us_internal_ssl_socket_write((struct us_internal_ssl_socket_t *) s, data, length);
}
#endif
if (us_socket_is_closed(ssl, s) || us_socket_is_shut_down(ssl, s)) {
return 0;
}

int written = bsd_send(us_poll_fd(&s->p), data, length, msg_more);
int written = bsd_send(us_poll_fd(&s->p), data, length);
if (written != length) {
s->context->loop->data.last_write_failed = 1;
us_poll_change(&s->p, s->context->loop, LIBUS_SOCKET_READABLE | LIBUS_SOCKET_WRITABLE);
@@ -495,14 +495,14 @@ struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_clien
}

int us_socket_raw_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more) {
int us_socket_raw_write(int ssl, struct us_socket_t *s, const char *data, int length) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return us_internal_ssl_socket_raw_write((struct us_internal_ssl_socket_t *) s, data, length, msg_more);
return us_internal_ssl_socket_raw_write((struct us_internal_ssl_socket_t *) s, data, length);
}
#endif
// non-TLS is always raw
return us_socket_write(ssl, s, data, length, msg_more);
return us_socket_write(ssl, s, data, length);
}

unsigned int us_get_remote_address_info(char *buf, struct us_socket_t *s, const char **dest, int *port, int *is_ipv6)

@@ -247,7 +247,7 @@ public:
int max_flush_len = std::min(buffer_len, (size_t)INT_MAX);

/* Attempt to write data to the socket */
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len, 0);
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len);
total_written += written;

/* Check if we couldn't write the entire buffer */
@@ -297,7 +297,7 @@ public:
int max_flush_len = std::min(buffer_len, (size_t)INT_MAX);

/* Write off as much as we can */
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len, /*nextLength != 0 | */length);
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len);
/* On failure return, otherwise continue down the function */
if ((unsigned int) written < buffer_len) {
/* Update buffering (todo: we can do better here if we keep track of what happens to this guy later on) */
@@ -342,7 +342,7 @@ public:
}
} else {
/* We are not corked */
int written = us_socket_write(SSL, (us_socket_t *) this, src, length, nextLength != 0);
int written = us_socket_write(SSL, (us_socket_t *) this, src, length);

/* Did we fail? */
if (written < length) {

@@ -383,7 +383,7 @@ private:
httpContextData->onClientError(SSL, s, result.parserError, data, length);
}
/* For errors, we only deliver them "at most once". We don't care if they get halfways delivered or not. */
us_socket_write(SSL, s, httpErrorResponses[httpErrorStatusCode].data(), (int) httpErrorResponses[httpErrorStatusCode].length(), false);
us_socket_write(SSL, s, httpErrorResponses[httpErrorStatusCode].data(), (int) httpErrorResponses[httpErrorStatusCode].length());
us_socket_shutdown(SSL, s);
/* Close any socket on HTTP errors */
us_socket_close(SSL, s, 0, nullptr);

@@ -1,4 +1,4 @@
# Version: 8
# Version: 9
# A script that installs the dependencies needed to build and test Bun.
# This should work on Windows 10 or newer with PowerShell.

@@ -240,11 +240,11 @@ function Install-Git {
}

function Install-NodeJs {
Install-Package nodejs -Command node -Version "22.9.0"
Install-Package nodejs -Command node -Version "24.3.0"
}

function Install-Bun {
Install-Package bun -Version "1.1.30"
Install-Package bun -Version "1.2.17"
}

function Install-Cygwin {

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 11
# Version: 14

# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -130,7 +130,7 @@ create_directory() {
create_tmp_directory() {
mktemp="$(require mktemp)"
path="$(execute "$mktemp" -d)"
grant_to_user "$path"
grant_to_user "$path"
print "$path"
}

@@ -191,10 +191,21 @@ download_file() {

fetch "$file_url" >"$file_tmp_path"
grant_to_user "$file_tmp_path"

print "$file_tmp_path"
}

# path=$(download_and_verify_file URL sha256)
download_and_verify_file() {
file_url="$1"
hash="$2"

path=$(download_file "$file_url")
execute sh -c 'echo "'"$hash $path"'" | sha256sum -c' >/dev/null 2>&1
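# sha256sum -c exits non-zero when the hash does not match the downloaded file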

print "$path"
}

append_to_profile() {
content="$1"
profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
@@ -317,7 +328,7 @@ check_operating_system() {
distro="$("$sw_vers" -productName)"
release="$("$sw_vers" -productVersion)"
fi

case "$arch" in
x64)
sysctl="$(which sysctl)"
@@ -400,7 +411,7 @@ check_package_manager() {
pm="brew"
;;
linux)
if [ -f "$(which apt)" ]; then
if [ -f "$(which apt-get)" ]; then
pm="apt"
elif [ -f "$(which dnf)" ]; then
pm="dnf"
@@ -470,10 +481,8 @@ check_ulimit() {

print "Checking ulimits..."
systemd_conf="/etc/systemd/system.conf"
if [ -f "$systemd_conf" ]; then
limits_conf="/etc/security/limits.d/99-unlimited.conf"
create_file "$limits_conf"
fi
limits_conf="/etc/security/limits.d/99-unlimited.conf"
create_file "$limits_conf"

limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue"
for limit in $limits; do
@@ -495,6 +504,10 @@ check_ulimit() {
fi

if [ -f "$systemd_conf" ]; then
# in systemd's configuration you need to say "infinity" when you mean "unlimited"
if [ "$limit_value" = "unlimited" ]; then
limit_value="infinity"
fi
append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
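# e.g. this writes "DefaultLimitNOFILE=infinity" rather than "DefaultLimitNOFILE=unlimited"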
fi
done
@@ -534,7 +547,7 @@ check_ulimit() {
append_file "$dpkg_conf" "force-unsafe-io"
append_file "$dpkg_conf" "no-debsig"

apt_conf="/etc/apt/apt.conf.d/99-ci-options"
apt_conf="/etc/apt/apt.conf.d/99-ci-options"
execute_sudo create_directory "$(dirname "$apt_conf")"
append_file "$apt_conf" 'Acquire::Languages "none";'
append_file "$apt_conf" 'Acquire::GzipIndexes "true";'
@@ -549,7 +562,7 @@ check_ulimit() {
package_manager() {
case "$pm" in
apt)
execute_sudo apt "$@"
execute_sudo apt-get "$@"
;;
dnf)
case "$distro" in
@@ -598,6 +611,7 @@ install_packages() {
package_manager install \
--yes \
--no-install-recommends \
--fix-missing \
"$@"
;;
dnf)
@@ -673,7 +687,7 @@ install_common_software() {
esac

case "$distro" in
amzn)
amzn | alpine)
install_packages \
tar
;;
@@ -711,12 +725,7 @@ install_common_software() {
}

nodejs_version_exact() {
# https://unofficial-builds.nodejs.org/download/release/
if ! [ "$abi" = "musl" ] && [ -n "$abi_version" ] && ! [ "$(compare_version "$abi_version" "2.27")" = "1" ]; then
print "16.9.1"
else
print "22.9.0"
fi
print "24.3.0"
}

nodejs_version() {
@@ -746,26 +755,60 @@ install_nodejs() {
;;
esac

# Some distros do not install the node headers by default.
# These are needed for certain FFI tests, such as: `cc.test.ts`
case "$distro" in
alpine | amzn)
install_nodejs_headers
;;
esac
# Ensure that Node.js headers are always pre-downloaded so that we don't rely on node-gyp
install_nodejs_headers
}

install_nodejs_headers() {
nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
nodejs_version="$(nodejs_version_exact)"
nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$nodejs_version/node-v$nodejs_version-headers.tar.gz")"
nodejs_headers_dir="$(dirname "$nodejs_headers_tar")"
execute tar -xzf "$nodejs_headers_tar" -C "$nodejs_headers_dir"

nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include"
nodejs_headers_include="$nodejs_headers_dir/node-v$nodejs_version/include"
execute_sudo cp -R "$nodejs_headers_include/" "/usr"

# Also install to node-gyp cache locations for different node-gyp versions
# This ensures node-gyp finds headers without downloading them
setup_node_gyp_cache "$nodejs_version" "$nodejs_headers_dir/node-v$nodejs_version"
}

setup_node_gyp_cache() {
nodejs_version="$1"
headers_source="$2"

cache_dir="$home/.cache/node-gyp/$nodejs_version"
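# e.g. for Node.js 24.3.0 the headers land under $home/.cache/node-gyp/24.3.0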

create_directory "$cache_dir"

# Copy headers
if [ -d "$headers_source/include" ]; then
cp -R "$headers_source/include" "$cache_dir/" 2>/dev/null || true
fi

# Create installVersion file (node-gyp expects this)
echo "11" > "$cache_dir/installVersion" 2>/dev/null || true

# For Linux, we don't need .lib files like Windows
# but create the directory structure node-gyp expects
case "$arch" in
x86_64|amd64)
create_directory "$cache_dir/lib/x64" 2>/dev/null || true
;;
aarch64|arm64)
create_directory "$cache_dir/lib/arm64" 2>/dev/null || true
;;
*)
create_directory "$cache_dir/lib" 2>/dev/null || true
;;
esac

# Ensure entire path is accessible, not just last component
grant_to_user "$home/.cache"
}

bun_version_exact() {
print "1.2.0"
print "1.2.17"
}

install_bun() {
@@ -910,7 +953,7 @@ install_llvm() {
bash="$(require bash)"
llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")"
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all

# Install llvm-symbolizer explicitly to ensure it's available for ASAN
install_packages "llvm-$(llvm_version)-tools"
;;
@@ -930,7 +973,8 @@ install_llvm() {
}

install_gcc() {
if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]; then
if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]
then
return
fi

@@ -1332,6 +1376,58 @@ install_chromium() {
esac
}

install_age() {
# we only use this to encrypt core dumps, which we only have on Linux
case "$os" in
linux)
age_tarball=""
case "$arch" in
x64)
age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-amd64.tar.gz 7df45a6cc87d4da11cc03a539a7470c15b1041ab2b396af088fe9990f7c79d50)"
;;
aarch64)
age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-arm64.tar.gz 57fd79a7ece5fe501f351b9dd51a82fbee1ea8db65a8839db17f5c080245e99f)"
;;
esac

age_extract_dir="$(create_tmp_directory)"
execute tar -C "$age_extract_dir" -zxf "$age_tarball" age/age
move_to_bin "$age_extract_dir/age/age"
;;
esac
}

configure_core_dumps() {
# we only have core dumps on Linux
case "$os" in
linux)
# set up a directory that the test runner will look in after running tests
cores_dir="/var/bun-cores-$distro-$release-$arch"
sysctl_file="/etc/sysctl.d/local.conf"
create_directory "$cores_dir"
# ensure core_pattern will point there
# %e = executable filename
# %p = pid
append_file "$sysctl_file" "kernel.core_pattern = $cores_dir/%e-%p.core"
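# e.g. a crash of "bun" with pid 1234 would be written to $cores_dir/bun-1234.core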

# disable apport.service if it exists since it will override the core_pattern
if which systemctl >/dev/null; then
if systemctl list-unit-files apport.service >/dev/null; then
execute_sudo "$systemctl" disable --now apport.service
fi
fi

# load the new configuration
execute_sudo sysctl -p "$sysctl_file"

# ensure that a regular user will be able to run sysctl
if [ -d /sbin ]; then
append_to_path /sbin
fi
;;
esac
}

clean_system() {
if ! [ "$ci" = "1" ]; then
return
@@ -1357,6 +1453,8 @@ main() {
install_build_essentials
install_chromium
install_fuse_python
install_age
configure_core_dumps
clean_system
}
740 scripts/buildkite-failures.ts (Executable file)
@@ -0,0 +1,740 @@
#!/usr/bin/env bun
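// Usage (inferred from the argument handling below):
//   bun scripts/buildkite-failures.ts [buildkite-build-url | github-pr-url | pr-number | branch]
// With no argument the current git branch is used; -w/--warnings and -f/--flaky toggle extra output.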
import { $ } from "bun";
import { existsSync } from "fs";
import { resolve } from "path";

// Check if we're in a TTY for color support
const isTTY = process.stdout.isTTY || process.env.FORCE_COLOR === "1";

// Get git root directory
let gitRoot = process.cwd();
try {
gitRoot = (await $`git rev-parse --show-toplevel`.quiet().text()).trim();
} catch {
// Fall back to current directory if not in a git repo
}

// Helper to convert file path to file:// URL if it exists
function fileToUrl(filePath) {
try {
// Extract just the file path without line numbers or other info
const match = filePath.match(/^([^\s:]+\.(ts|js|tsx|jsx|zig))/);
if (!match) return filePath;

const cleanPath = match[1];
const fullPath = resolve(gitRoot, cleanPath);

if (existsSync(fullPath)) {
return `file://${fullPath}`;
}
} catch (error) {
// If anything fails, just return the original path
}

return filePath;
}

// Color codes - simpler color scheme
const colors = {
reset: isTTY ? "\x1b[0m" : "",
bold: isTTY ? "\x1b[1m" : "",
dim: isTTY ? "\x1b[2m" : "",
red: isTTY ? "\x1b[31m" : "",
green: isTTY ? "\x1b[32m" : "",
bgBlue: isTTY ? "\x1b[44m" : "",
bgRed: isTTY ? "\x1b[41m" : "",
white: isTTY ? "\x1b[97m" : "",
};

// Parse command line arguments
const args = process.argv.slice(2);
const showWarnings = args.includes("--warnings") || args.includes("-w");
const showFlaky = args.includes("--flaky") || args.includes("-f");
const inputArg = args.find(arg => !arg.startsWith("-"));

// Determine what type of input we have
let buildNumber = null;
let branch = null;

if (inputArg) {
// BuildKite URL
if (inputArg.includes("buildkite.com")) {
const buildMatch = inputArg.match(/builds\/(\d+)/);
if (buildMatch) {
buildNumber = buildMatch[1];
}
}
// GitHub PR URL
else if (inputArg.includes("github.com") && inputArg.includes("/pull/")) {
const prMatch = inputArg.match(/pull\/(\d+)/);
if (prMatch) {
// Fetch PR info from GitHub API
const prNumber = prMatch[1];
const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`);
if (prResponse.ok) {
const pr = await prResponse.json();
branch = pr.head.ref;
}
}
}
// Plain number or #number - assume it's a GitHub PR
else if (/^#?\d+$/.test(inputArg)) {
const prNumber = inputArg.replace("#", "");
const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`);
if (prResponse.ok) {
const pr = await prResponse.json();
branch = pr.head.ref;
} else {
// If not a valid PR, maybe it's a BuildKite build number
buildNumber = prNumber;
}
}
// Otherwise assume it's a branch name
else {
branch = inputArg;
}
} else {
// No input, use current branch
branch = (await $`git rev-parse --abbrev-ref HEAD`.text()).trim();
}

// If branch specified, find latest build
if (!buildNumber) {
const buildsUrl = `https://buildkite.com/bun/bun/builds?branch=${encodeURIComponent(branch)}`;
const response = await fetch(buildsUrl);
const html = await response.text();
const match = html.match(/\/bun\/bun\/builds\/(\d+)/);

if (!match) {
console.log(`No builds found for branch: ${branch}`);
process.exit(0);
}

buildNumber = match[1];
}

// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
const build = await buildResponse.json();

// Calculate time ago
const buildTime = new Date(build.started_at);
const now = new Date();
const diffMs = now.getTime() - buildTime.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);

let timeAgo;
if (diffDays > 0) {
timeAgo = `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
} else if (diffHours > 0) {
timeAgo = `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
} else if (diffMins > 0) {
timeAgo = `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
} else {
timeAgo = `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
}

console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);

// Check if build passed
if (build.state === "passed") {
console.log(`${colors.green}✅ Passed!${colors.reset}`);
process.exit(0);
}

// Get failed jobs
const failedJobs =
build.jobs?.filter(job => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script") || [];

// Platform emoji mapping
const platformMap = {
"darwin": "🍎",
"macos": "🍎",
"ubuntu": "🐧",
"debian": "🐧",
"alpine": "🐧",
"linux": "🐧",
"windows": "🪟",
"win": "🪟",
};

// Fetch annotations by scraping the build page
const pageResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}`);
const pageHtml = await pageResponse.text();

// Extract script tags using HTMLRewriter
let annotationsData = null;
const scriptContents: string[] = [];

const scriptRewriter = new HTMLRewriter().on("script", {
text(text) {
scriptContents.push(text.text);
},
});

await new Response(scriptRewriter.transform(new Response(pageHtml))).text();
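// HTMLRewriter operates on Response streams, so round-tripping the page HTML through it
// (and draining the result) is what drives the "script" text handler above.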

// Find the registerRequest call in script contents
const fullScript = scriptContents.join("");
let registerRequestIndex = fullScript.indexOf("registerRequest");

// Find the AnnotationsListRendererQuery after registerRequest
if (registerRequestIndex !== -1) {
const afterRegisterRequest = fullScript.substring(registerRequestIndex);
const annotationsIndex = afterRegisterRequest.indexOf('"AnnotationsListRendererQuery"');
if (annotationsIndex === -1 || annotationsIndex > 100) {
// Not the right registerRequest call
registerRequestIndex = -1;
}
}

if (registerRequestIndex !== -1) {
try {
// Find the start of the JSON object (after the comma and any whitespace)
let jsonStart = registerRequestIndex;

// Skip to the opening brace, accounting for the function name and first parameter
let commaFound = false;
for (let i = registerRequestIndex; i < fullScript.length; i++) {
if (fullScript[i] === "," && !commaFound) {
commaFound = true;
} else if (commaFound && fullScript[i] === "{") {
jsonStart = i;
break;
}
}

// Find the matching closing brace, considering strings
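// (The object is embedded in a larger script, so JSON.parse can't be applied to the whole
// tag; scan for the balanced closing brace instead, ignoring braces inside string literals.)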
let braceCount = 0;
let jsonEnd = jsonStart;
let inString = false;
let escapeNext = false;

for (let i = jsonStart; i < fullScript.length; i++) {
const char = fullScript[i];

if (escapeNext) {
escapeNext = false;
continue;
}

if (char === "\\") {
escapeNext = true;
continue;
}

if (char === '"' && !inString) {
inString = true;
} else if (char === '"' && inString) {
inString = false;
}

if (!inString) {
if (char === "{") braceCount++;
else if (char === "}") {
braceCount--;
if (braceCount === 0) {
jsonEnd = i + 1;
break;
}
}
}
}

const jsonString = fullScript.substring(jsonStart, jsonEnd);
annotationsData = JSON.parse(jsonString);
const edges = annotationsData?.build?.annotations?.edges || [];

// Just collect all unique annotations by context
const annotationsByContext = new Map();

for (const edge of edges) {
const node = edge.node;
if (!node || !node.context) continue;

// Skip if we already have this context
if (annotationsByContext.has(node.context)) {
continue;
}

annotationsByContext.set(node.context, {
context: node.context,
html: node.body?.html || "",
});
}

// Collect annotations
const annotations = Array.from(annotationsByContext.values());

// Group annotations by test file to detect duplicates
const annotationsByFile = new Map();
const nonFileAnnotations = [];

for (const annotation of annotations) {
// Check if this is a file-based annotation
const isFileAnnotation = annotation.context.match(/\.(ts|js|tsx|jsx|zig)$/);

if (isFileAnnotation) {
// Parse the HTML to extract all platform sections
const html = annotation.html || "";

// Check if this annotation contains multiple <details> sections (one per platform)
const detailsSections = html.match(/<details>[\s\S]*?<\/details>/g);

if (detailsSections && detailsSections.length > 1) {
// Multiple platform failures in one annotation
for (const section of detailsSections) {
const summaryMatch = section.match(
/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/,
);

if (summaryMatch) {
const filePath = summaryMatch[1];
const failureInfo = summaryMatch[2];
const platformHtml = summaryMatch[3];
const platform = platformHtml.replace(/<img[^>]+>/g, "").trim();

const fileKey = `${filePath}|${failureInfo}`;
if (!annotationsByFile.has(fileKey)) {
annotationsByFile.set(fileKey, {
filePath,
failureInfo,
platforms: [],
htmlParts: [],
originalAnnotations: [],
});
}

const entry = annotationsByFile.get(fileKey);
entry.platforms.push(platform);
entry.htmlParts.push(section);
entry.originalAnnotations.push({
...annotation,
html: section,
originalHtml: html,
});
}
}
} else {
// Single platform failure
const summaryMatch = html.match(
/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/,
);

if (summaryMatch) {
const filePath = summaryMatch[1];
const failureInfo = summaryMatch[2];
const platformHtml = summaryMatch[3];
const platform = platformHtml.replace(/<img[^>]+>/g, "").trim();

const fileKey = `${filePath}|${failureInfo}`;
if (!annotationsByFile.has(fileKey)) {
annotationsByFile.set(fileKey, {
filePath,
failureInfo,
platforms: [],
htmlParts: [],
originalAnnotations: [],
});
}

const entry = annotationsByFile.get(fileKey);
entry.platforms.push(platform);
entry.htmlParts.push(html);
entry.originalAnnotations.push(annotation);
} else {
// Couldn't parse, treat as non-file annotation
nonFileAnnotations.push(annotation);
}
}
} else {
// Non-file annotations (like "zig error")
nonFileAnnotations.push(annotation);
}
}

// Create merged annotations
const mergedAnnotations = [];

// Add file-based annotations
for (const [key, entry] of annotationsByFile) {
const { filePath, failureInfo, platforms, htmlParts, originalAnnotations } = entry;

// If we have multiple platforms with the same content, merge them
if (platforms.length > 1) {
// Create context string with all platforms
const uniquePlatforms = [...new Set(platforms)];
const context = `${filePath} - ${failureInfo} on ${uniquePlatforms.join(", ")}`;

// Check if all HTML parts are identical
const firstHtml = htmlParts[0];
const allSame = htmlParts.every(html => html === firstHtml);

let mergedHtml = "";
if (allSame) {
// If all the same, just use the first one
mergedHtml = firstHtml;
} else {
// If different, try to find one with the most color spans
let bestHtml = firstHtml;
let maxColorCount = (firstHtml.match(/term-fg/g) || []).length;

for (const html of htmlParts) {
const colorCount = (html.match(/term-fg/g) || []).length;
if (colorCount > maxColorCount) {
maxColorCount = colorCount;
bestHtml = html;
}
}
mergedHtml = bestHtml;
}

mergedAnnotations.push({
context,
html: mergedHtml,
merged: true,
platformCount: uniquePlatforms.length,
});
} else {
// Single platform, use original
mergedAnnotations.push(originalAnnotations[0]);
}
}

// Add non-file annotations
mergedAnnotations.push(...nonFileAnnotations);

// Sort annotations: ones with colors at the bottom
const annotationsWithColorInfo = mergedAnnotations.map(annotation => {
const html = annotation.html || "";
const hasColors = html.includes("term-fg") || html.includes("\\x1b[");
return { annotation, hasColors };
});

// Sort: no colors first, then colors
annotationsWithColorInfo.sort((a, b) => {
if (a.hasColors === b.hasColors) return 0;
return a.hasColors ? 1 : -1;
});

const sortedAnnotations = annotationsWithColorInfo.map(item => item.annotation);

// Count failures - look for actual test counts in the content
let totalFailures = 0;
let totalFlaky = 0;

// First try to count from annotations
for (const annotation of sortedAnnotations) {
const isFlaky = annotation.context.toLowerCase().includes("flaky");
const html = annotation.html || "";

// Look for patterns like "X tests failed" or "X failing"
const failureMatches = html.match(/(\d+)\s+(tests?\s+failed|failing)/gi);
if (failureMatches) {
for (const match of failureMatches) {
const count = parseInt(match.match(/\d+/)[0]);
if (isFlaky) {
totalFlaky += count;
} else {
totalFailures += count;
}
break; // Only count first match to avoid duplicates
}
} else if (!isFlaky) {
// If no count found, count the annotation itself
totalFailures++;
}
}

// If no annotations, use job count
if (totalFailures === 0 && failedJobs.length > 0) {
totalFailures = failedJobs.length;
}

// Display failure count
if (totalFailures > 0 || totalFlaky > 0) {
if (totalFailures > 0) {
console.log(`\n${colors.red}${colors.bold}${totalFailures} test failures${colors.reset}`);
}
if (showFlaky && totalFlaky > 0) {
console.log(`${colors.dim}${totalFlaky} flaky tests${colors.reset}`);
}
console.log();
} else if (failedJobs.length > 0) {
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
}

// Display all annotations
console.log();
for (const annotation of sortedAnnotations) {
// Skip flaky tests unless --flaky flag is set
if (!showFlaky && annotation.context.toLowerCase().includes("flaky")) {
continue;
}

// Display context header with background color
// For merged annotations, show platform info
if (annotation.merged && annotation.platformCount) {
// Extract filename and failure info from context
const contextParts = annotation.context.match(/^(.+?)\s+-\s+(.+?)\s+on\s+(.+)$/);
if (contextParts) {
const [, filename, failureInfo, platformsStr] = contextParts;
const fileUrl = fileToUrl(filename);
console.log(
`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platformsStr}${colors.reset}`,
);
} else {
const fileUrl = fileToUrl(annotation.context);
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`);
}
} else {
// Single annotation - need to extract platform info from HTML
const fileUrl = fileToUrl(annotation.context);

// Try to extract platform info from the HTML for single platform tests
const html = annotation.html || "";
const singlePlatformMatch = html.match(
/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/,
);

if (singlePlatformMatch) {
const failureInfo = singlePlatformMatch[2];
const platformHtml = singlePlatformMatch[3];
const platform = platformHtml.replace(/<img[^>]+>/g, "").trim();
console.log(
`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platform}${colors.reset}`,
);
} else {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`);
}
}
console.log();

// Process the annotation HTML to preserve colors
const html = annotation.html || "";

// First unescape unicode sequences
let unescapedHtml = html
.replace(/\\u003c/g, "<")
.replace(/\\u003e/g, ">")
.replace(/\\u0026/g, "&")
.replace(/\\"/g, '"')
.replace(/\\'/g, "'")
.replace(/\\u001b/g, "\x1b"); // Unescape ANSI escape sequences

// Handle newlines more carefully - BuildKite sometimes has actual newlines that shouldn't be there
// Only replace \n if it's actually an escaped newline, not part of the content
unescapedHtml = unescapedHtml.replace(/\\n/g, "\n");

// Also handle escaped ANSI sequences that might appear as \\x1b or \033
unescapedHtml = unescapedHtml.replace(/\\\\x1b/g, "\x1b").replace(/\\033/g, "\x1b");

// Convert HTML with ANSI color classes to actual ANSI codes
const termColors = {
// Standard colors (0-7)
"term-fg0": "\x1b[30m", // black
"term-fg1": "\x1b[31m", // red
"term-fg2": "\x1b[32m", // green
"term-fg3": "\x1b[33m", // yellow
"term-fg4": "\x1b[34m", // blue
"term-fg5": "\x1b[35m", // magenta
"term-fg6": "\x1b[36m", // cyan
"term-fg7": "\x1b[37m", // white
// Also support 30-37 format
"term-fg30": "\x1b[30m", // black
"term-fg31": "\x1b[31m", // red
"term-fg32": "\x1b[32m", // green
"term-fg33": "\x1b[33m", // yellow
"term-fg34": "\x1b[34m", // blue
"term-fg35": "\x1b[35m", // magenta
"term-fg36": "\x1b[36m", // cyan
"term-fg37": "\x1b[37m", // white
// Bright colors with 'i' prefix
"term-fgi90": "\x1b[90m", // bright black
"term-fgi91": "\x1b[91m", // bright red
"term-fgi92": "\x1b[92m", // bright green
"term-fgi93": "\x1b[93m", // bright yellow
"term-fgi94": "\x1b[94m", // bright blue
"term-fgi95": "\x1b[95m", // bright magenta
"term-fgi96": "\x1b[96m", // bright cyan
"term-fgi97": "\x1b[97m", // bright white
// Also support without 'i'
"term-fg90": "\x1b[90m", // bright black
"term-fg91": "\x1b[91m", // bright red
"term-fg92": "\x1b[92m", // bright green
"term-fg93": "\x1b[93m", // bright yellow
"term-fg94": "\x1b[94m", // bright blue
"term-fg95": "\x1b[95m", // bright magenta
"term-fg96": "\x1b[96m", // bright cyan
"term-fg97": "\x1b[97m", // bright white
// Background colors
"term-bg40": "\x1b[40m", // black
"term-bg41": "\x1b[41m", // red
"term-bg42": "\x1b[42m", // green
"term-bg43": "\x1b[43m", // yellow
"term-bg44": "\x1b[44m", // blue
"term-bg45": "\x1b[45m", // magenta
"term-bg46": "\x1b[46m", // cyan
"term-bg47": "\x1b[47m", // white
// Text styles
"term-bold": "\x1b[1m",
"term-dim": "\x1b[2m",
"term-italic": "\x1b[3m",
"term-underline": "\x1b[4m",
};
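// e.g. on a TTY, <span class="term-fg31">1 failing</span> is re-emitted as "\x1b[31m1 failing\x1b[0m"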

let text = unescapedHtml;

// Convert color spans to ANSI codes if TTY
if (isTTY) {
// Convert spans with color classes to ANSI codes
for (const [className, ansiCode] of Object.entries(termColors)) {
// Match spans that contain the class name (might have multiple classes)
// Need to handle both formats: <span class="..."> and <span ... class="...">
const regex = new RegExp(`<span[^>]*class="[^"]*\\b${className}\\b[^"]*"[^>]*>([\\s\\S]*?)</span>`, "g");
text = text.replace(regex, (match, content) => {
// Don't add reset if the content already has ANSI codes
if (content.includes("\x1b[")) {
return `${ansiCode}${content}`;
}
return `${ansiCode}${content}${colors.reset}`;
});
}
}

// Check if we already have ANSI codes in the text after processing
const hasExistingAnsi = text.includes("\x1b[");

// Check for broken color patterns (single characters wrapped in colors)
// If we see patterns like green[, red text, green], it's likely broken
// Also check for patterns like: green[, then reset, then text, then red text, then reset, then green]
const hasBrokenColors =
text.includes("\x1b[32m[") ||
text.includes("\x1b[32m]") ||
(text.includes("\x1b[32m✓") && text.includes("\x1b[31m") && text.includes("ms]"));

if (hasBrokenColors) {
// Remove all ANSI codes if the coloring looks broken
text = text.replace(/\x1b\[[0-9;]*m/g, "");
}

// Remove all HTML tags, but be careful with existing ANSI codes
text = text
.replace(/<pre[^>]*><code[^>]*>([\s\S]*?)<\/code><\/pre>/g, "$1")
.replace(/<br\s*\/?>/g, "\n")
.replace(/<\/p>/g, "\n")
.replace(/<p>/g, "")
.replace(/<[^>]+>/g, "")
.replace(/&lt;/g, "<")
.replace(/&gt;/g, ">")
.replace(/&amp;/g, "&")
.replace(/&quot;/g, '"')
.replace(/&#39;/g, "'")
.replace(/&nbsp;/g, " ")
.replace(/\u00A0/g, " ") // Non-breaking space
.trim();

// Remove excessive blank lines - be more aggressive
text = text.replace(/\n\s*\n\s*\n+/g, "\n\n"); // Replace 3+ newlines with 2
text = text.replace(/\n\s*\n/g, "\n"); // Replace 2 newlines with 1

// For zig error annotations, check if there are multiple platform sections
let handled = false;
if (annotation.context.includes("zig error")) {
// Split by platform headers within the content
const platformSections = text.split(/(?=^\s*[^\s\/]+\.zig\s*-\s*zig error\s+on\s+)/m);

if (platformSections.length > 1) {
// Skip the first empty section if it exists
const sections = platformSections.filter(s => s.trim());

if (sections.length > 1) {
// We have multiple platform errors in one annotation
// Extract unique platform names
const platforms = [];
for (const section of sections) {
const platformMatch = section.match(/on\s+(\S+)/);
if (platformMatch) {
platforms.push(platformMatch[1]);
}
}

// Show combined header with background color
const filename = annotation.context;
const fileUrl = fileToUrl(filename);
const platformText = platforms.join(", ");
console.log(
`${colors.bgRed}${colors.white}${colors.bold} ${fileUrl} ${colors.reset} ${colors.dim}on ${platformText}${colors.reset}`,
);
console.log();

// Show only the first error detail (they're the same)
const firstError = sections[0];
const errorLines = firstError.split("\n");

// Skip the platform-specific header line and remove excessive blank lines
let previousWasBlank = false;
for (let i = 0; i < errorLines.length; i++) {
const line = errorLines[i];
if (i === 0 && line.match(/\.zig\s*-\s*zig error\s+on\s+/)) {
continue; // Skip platform header
}

// Skip multiple consecutive blank lines
const isBlank = line.trim() === "";
if (isBlank && previousWasBlank) {
continue;
}
previousWasBlank = isBlank;

console.log(line); // No indentation
}
console.log();
handled = true;
}
}
}

// Normal processing for other annotations
if (!handled) {
// For merged annotations, skip the duplicate headers within the content
const isMerged = annotation.merged || (annotation.platformCount && annotation.platformCount > 1);

// Process lines, removing excessive blank lines
let previousWasBlank = false;
text.split("\n").forEach((line, index) => {
// For merged annotations, skip duplicate platform headers
if (
isMerged &&
index > 0 &&
line.match(/^[^\s\/]+\.(ts|js|tsx|jsx|zig)\s*-\s*\d+\s+(failing|errors?|warnings?)\s+on\s+/)
) {
return; // Skip duplicate headers in merged content
}

// Skip multiple consecutive blank lines
const isBlank = line.trim() === "";
if (isBlank && previousWasBlank) {
return;
}
previousWasBlank = isBlank;

console.log(line); // No indentation
});
console.log();
}
}
} catch (e) {
console.error("Failed to parse annotations:", e);
console.log("\nView detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
} else {
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
console.log("View detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
63 scripts/debug-coredump.ts (Normal file)
@@ -0,0 +1,63 @@
import fs from "node:fs";
import { tmpdir } from "node:os";
import { basename, join } from "node:path";
import { parseArgs } from "node:util";

// usage: bun debug-coredump.ts
// -p <PID of the test that crashed> (buildkite should show this)
// -b <URL to the bun-profile.zip artifact for the appropriate platform>
// -c <URL to the bun-cores.tar.gz.age artifact for the appropriate platform>
// -d <debugger> (default: lldb)
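// e.g. (hypothetical URLs): bun debug-coredump.ts -p 1234 \
//   -b https://buildkite.example/bun-profile.zip -c https://buildkite.example/bun-cores.tar.gz.age
// The decryption key must also be present in $AGE_CORES_IDENTITY (checked below).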
const {
values: { pid: stringPid, ["build-url"]: buildUrl, ["cores-url"]: coresUrl, debugger: debuggerPath },
} = parseArgs({
options: {
pid: { type: "string", short: "p" },
["build-url"]: { type: "string", short: "b" },
["cores-url"]: { type: "string", short: "c" },
debugger: { type: "string", short: "d", default: "lldb" },
},
});

if (stringPid === undefined) throw new Error("no PID given");
const pid = parseInt(stringPid);
if (buildUrl === undefined) throw new Error("no build-url given");
if (coresUrl === undefined) throw new Error("no cores-url given");
if (!process.env.AGE_CORES_IDENTITY?.startsWith("AGE-SECRET-KEY-"))
throw new Error("no identity given in $AGE_CORES_IDENTITY");

const id = Bun.hash(buildUrl + coresUrl).toString(36);
const dir = join(tmpdir(), `debug-coredump-${id}.tmp`);
fs.mkdirSync(dir, { recursive: true });

if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${pid}.core`))) {
console.log("downloading bun-profile.zip");
const zip = await (await fetch(buildUrl)).arrayBuffer();
await Bun.write(join(dir, "bun-profile.zip"), zip);
// -j: junk paths (don't create directories when extracting)
// -o: overwrite without prompting
// -d: extract to this directory instead of cwd
await Bun.$`unzip -j -o ${join(dir, "bun-profile.zip")} -d ${dir}`;

console.log("downloading cores");
const cores = await (await fetch(coresUrl)).arrayBuffer();
await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`;
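// <(echo "$AGE_CORES_IDENTITY") is bash process substitution: it hands the age identity to -i
// through a file descriptor, so the key is never written to disk.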

console.log("moving cores out of nested directory");
for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) {
fs.renameSync(join(dir, file), join(dir, basename(file)));
}
} else {
console.log(`already downloaded in ${dir}`);
}

console.log("launching debugger:");
console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`);

const proc = await Bun.spawn([debuggerPath, "--core", join(dir, `bun-${pid}.core`), join(dir, "bun-profile")], {
stdin: "inherit",
stdout: "inherit",
stderr: "inherit",
});
await proc.exited;
process.exit(proc.exitCode);
125 scripts/longest.js (Normal file)
@@ -0,0 +1,125 @@
const fs = require("fs");
const path = require("path");

// Regex patterns for different types of top-level declarations
const DECLARATION_PATTERN =
// pub? (export|extern)? (const|fn|var) name
/^(pub\s+)?(export\s+|extern\s+)?(const|fn|var)\s+([a-zA-Z_][a-zA-Z0-9_]*)/;
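// e.g. "pub fn init(" matches with type "fn" and name "init";
// 'const std = @import("std");' matches with type "const" and name "std"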

function findDeclarations(filePath) {
const content = fs.readFileSync(filePath, "utf8");
const lines = content.split("\n");
const declarations = [];

// First pass: collect all declarations with their line numbers
for (let lineNum = 0; lineNum < lines.length; lineNum++) {
const line = lines[lineNum];

// Skip empty lines and comments
if (!line || line.trim().startsWith("//") || line.trim().startsWith("///")) {
continue;
}

// Only process top-level declarations (no indentation)
if (line.startsWith(" ") || line.startsWith("\t")) {
continue;
}

const trimmedLine = line.trim();

// Check each pattern
const match = trimmedLine.match(DECLARATION_PATTERN);
if (match) {
// Extract the name from the match
const name = match[match.length - 1]; // Last capture group is the name

declarations.push({
name,
match: match[0],
line: lineNum + 1,
type: getDeclarationType(match[0]),
fullLine: trimmedLine,
startLine: lineNum,
});
}
}

// Second pass: calculate sizes based on next declaration's start line
for (let i = 0; i < declarations.length; i++) {
const currentDecl = declarations[i];
const nextDecl = declarations[i + 1];

if (nextDecl) {
// Size is from current declaration start to next declaration start
currentDecl.size = nextDecl.startLine - currentDecl.startLine;
} else {
// Last declaration: size is from current declaration start to end of file
currentDecl.size = lines.length - currentDecl.startLine;
}
}

return declarations;
}

function getDeclarationType(matchText) {
if (matchText.includes("const")) return "const";
if (matchText.includes("fn")) return "fn";
if (matchText.includes("var")) return "var";
return "unknown";
}

function main() {
const args = process.argv.slice(2);

if (args.length === 0) {
console.error("Usage: bun longest.js <zig-file>");
console.error("Example: bun longest.js src/walker_skippable.zig");
process.exit(1);
}

const filePath = args[0];

if (!fs.existsSync(filePath)) {
console.error(`File not found: ${filePath}`);
process.exit(1);
}

if (!filePath.endsWith(".zig")) {
console.error("Please provide a .zig file");
process.exit(1);
}

try {
const declarations = findDeclarations(filePath);

if (declarations.length === 0) {
console.log("No top-level declarations found.");
return;
}

console.log(`Found ${declarations.length} top-level declarations in ${filePath}:\n`);

// Sort by declaration size (smallest first)
declarations.sort((a, b) => a.size - b.size);

// Find the longest name for formatting
const maxNameLength = Math.max(...declarations.map(d => d.match.length));
const maxTypeLength = Math.max(...declarations.map(d => d.type.length));

console.log(`${"Name".padEnd(maxNameLength + 2)} ${"Type".padEnd(maxTypeLength + 2)} ${"Num Lines".padEnd(6)}`);
console.log("-".repeat(maxNameLength + maxTypeLength + 15));

declarations.forEach(decl => {
console.log(
`${decl.match.padEnd(maxNameLength + 2)} ${decl.type.padEnd(maxTypeLength + 2)} ${decl.size.toString().padEnd(6)}`,
);
});
} catch (error) {
console.error("Error reading file:", error.message);
process.exit(1);
}
}

if (require.main === module) {
main();
}
@@ -51,6 +51,7 @@ import {
isBuildkite,
isCI,
isGithubAction,
isLinux,
isMacOS,
isWindows,
isX64,
@@ -59,6 +60,7 @@ import {
startGroup,
tmpdir,
unzip,
uploadArtifact,
} from "./utils.mjs";
let isQuiet = false;
const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
@@ -146,6 +148,10 @@ const { values: options, positionals: filters } = parseArgs({
type: "boolean",
default: isBuildkite,
},
["coredump-upload"]: {
type: "boolean",
default: isBuildkite && isLinux,
},
},
});

@@ -230,6 +236,27 @@ function getTestExpectations() {
return expectations;
}

/**
 * Returns whether we should validate exception checks when running the given test
 * @param {string} test
 * @returns {boolean}
 */
const shouldValidateExceptions = (() => {
let skipArray;
return test => {
if (!skipArray) {
const path = join(cwd, "test/no-validate-exceptions.txt");
if (!existsSync(path)) {
skipArray = [];
} else {
skipArray = readFileSync(path, "utf-8")
.split("\n")
.filter(line => !line.startsWith("#") && line.length > 0);
}
}
return !(skipArray.includes(test) || skipArray.includes("test/" + test));
};
})();

/**
 * @param {string} testPath
 * @returns {string[]}
@@ -416,16 +443,20 @@ async function runTests() {
const runWithBunTest =
title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
const subcommand = runWithBunTest ? "test" : "run";
const env = {
FORCE_COLOR: "0",
NO_COLOR: "1",
BUN_DEBUG_QUIET_LOGS: "1",
};
if (basename(execPath).includes("asan") && shouldValidateExceptions(testPath)) {
env.BUN_JSC_validateExceptionChecks = "1";
}
await runTest(title, async () => {
const { ok, error, stdout } = await spawnBun(execPath, {
cwd: cwd,
args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
timeout: getNodeParallelTestTimeout(title),
env: {
FORCE_COLOR: "0",
NO_COLOR: "1",
BUN_DEBUG_QUIET_LOGS: "1",
},
env,
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
});
@@ -580,6 +611,78 @@ async function runTests() {
}
}

if (options["coredump-upload"]) {
try {
// this sysctl is set in bootstrap.sh to /var/bun-cores-$distro-$release-$arch
const sysctl = await spawnSafe({ command: "sysctl", args: ["-n", "kernel.core_pattern"] });
let coresDir = sysctl.stdout;
if (sysctl.ok) {
if (coresDir.startsWith("|")) {
throw new Error("cores are being piped not saved");
}
// change /foo/bar/%e-%p.core to /foo/bar
coresDir = dirname(sysctl.stdout);
} else {
throw new Error(`Failed to check core_pattern: ${sysctl.error}`);
}

const coresDirBase = dirname(coresDir);
const coresDirName = basename(coresDir);
const coreFileNames = readdirSync(coresDir);

if (coreFileNames.length > 0) {
console.log(`found ${coreFileNames.length} cores in ${coresDir}`);
let totalBytes = 0;
let totalBlocks = 0;
for (const f of coreFileNames) {
const stat = statSync(join(coresDir, f));
totalBytes += stat.size;
totalBlocks += stat.blocks;
}
console.log(`total apparent size = ${totalBytes} bytes`);
console.log(`total size on disk = ${512 * totalBlocks} bytes`);
const outdir = mkdtempSync(join(tmpdir(), "cores-upload"));
const outfileName = `${coresDirName}.tar.gz.age`;
const outfileAbs = join(outdir, outfileName);

// This matches an age identity known by Bun employees. Core dumps from CI have to be kept
// secret since they will contain API keys.
const ageRecipient = "age1eunsrgxwjjpzr48hm0y98cw2vn5zefjagt4r0qj4503jg2nxedqqkmz6fu"; // reject external PRs changing this, see above

// Run tar in the parent directory of coresDir so that it creates archive entries with
// coresDirName in them. This way when you extract the tarball you get a folder named
// bun-cores-XYZ containing core files, instead of a bunch of core files strewn in your
// current directory
const before = Date.now();
const zipAndEncrypt = await spawnSafe({
command: "bash",
args: [
"-c",
// tar -S: handle sparse files efficiently
`set -euo pipefail && tar -Sc "$0" | gzip -1 | age -e -r ${ageRecipient} -o "$1"`,
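// gzip -1 picks the fastest compression level; age then encrypts the stream to the recipient above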
// $0
coresDirName,
// $1
outfileAbs,
],
cwd: coresDirBase,
stdout: () => {},
timeout: 60_000,
});
const elapsed = Date.now() - before;
if (!zipAndEncrypt.ok) {
throw new Error(zipAndEncrypt.error);
}
console.log(`saved core dumps to ${outfileAbs} (${statSync(outfileAbs).size} bytes) in ${elapsed} ms`);
await uploadArtifact(outfileAbs);
} else {
console.log(`no cores found in ${coresDir}`);
}
} catch (err) {
console.error("Error collecting and uploading core dumps:", err);
}
}

if (!isCI && !isQuiet) {
console.table({
"Total Tests": okResults.length + failedResults.length + flakyResults.length,
@@ -755,6 +858,7 @@ async function spawnSafe(options) {
const [, message] = error || [];
error = message ? message.split("\n")[0].toLowerCase() : "crash";
error = error.indexOf("\\n") !== -1 ? error.substring(0, error.indexOf("\\n")) : error;
error = `pid ${subprocess.pid} ${error}`;
} else if (signalCode) {
if (signalCode === "SIGTERM" && duration >= timeout) {
error = "timeout";
@@ -846,7 +950,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
};

if (basename(execPath).includes("asan")) {
bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1";
bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0";
}

if (isWindows && bunEnv.Path) {
@@ -953,13 +1057,18 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {

testArgs.push(absPath);

const env = {
GITHUB_ACTIONS: "true", // always true so annotations are parsed
};
if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
env.BUN_JSC_validateExceptionChecks = "1";
}

const { ok, error, stdout } = await spawnBun(execPath, {
args: isReallyTest ? testArgs : [...args, absPath],
cwd: options["cwd"],
timeout: isReallyTest ? timeout : 30_000,
env: {
GITHUB_ACTIONS: "true", // always true so annotations are parsed
},
env,
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
});
@@ -993,7 +1102,7 @@ function getTestTimeout(testPath) {
if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) {
return integrationTimeout;
}
if (/napi/i.test(testPath)) {
if (/napi/i.test(testPath) || /v8/i.test(testPath)) {
return napiTimeout;
}
return testTimeout;
394 scripts/sortImports.ts (Normal file)
@@ -0,0 +1,394 @@
import { readdirSync } from "fs";
|
||||
import path from "path";
|
||||
|
||||
// Parse command line arguments
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
const filePaths = args.filter(arg => !arg.startsWith("-"));
|
||||
const usage = String.raw`
|
||||
__ .__ __
|
||||
____________________/ |_ _______|__| _____ ______ ____________/ |_ ______
|
||||
\___ / _ \_ __ \ __\ \___ / |/ \\____ \ / _ \_ __ \ __\/ ___/
|
||||
/ ( <_> ) | \/| | / /| | Y Y \ |_> > <_> ) | \/| | \___ \
|
||||
/_____ \____/|__| |__| /_____ \__|__|_| / __/ \____/|__| |__| /____ >
|
||||
\/ \/ \/|__| \/
|
||||
|
||||
Usage: bun scripts/sortImports [options] <files...>
|
||||
|
||||
Options:
|
||||
--help Show this help message
|
||||
--no-include-pub Exclude pub imports from sorting
|
||||
--no-remove-unused Don't remove unused imports
|
||||
--include-unsorted Process files even if they don't have @sortImports marker
|
||||
|
||||
Examples:
|
||||
bun scripts/sortImports src
|
||||
`.slice(1);
|
||||
if (args.includes("--help")) {
|
||||
console.log(usage);
|
||||
process.exit(0);
|
||||
}
|
||||
if (filePaths.length === 0) {
|
||||
console.error(usage);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const config = {
|
||||
includePub: !args.includes("--no-include-pub"),
|
||||
removeUnused: !args.includes("--no-remove-unused"),
|
||||
includeUnsorted: args.includes("--include-unsorted"),
|
||||
};
|
||||
|
||||
// Type definitions
|
||||
type Declaration = {
|
||||
index: number;
|
||||
key: string;
|
||||
value: string;
|
||||
segments: string[] | null;
|
||||
whole: string;
|
||||
last?: string;
|
||||
wholepath?: string[];
|
||||
};
|
||||
|
||||
// Parse declarations from the file
|
||||
function parseDeclarations(
|
||||
lines: string[],
|
||||
fileContents: string,
|
||||
): {
|
||||
declarations: Map<string, Declaration>;
|
||||
unusedLineIndices: number[];
|
||||
} {
|
||||
const declarations = new Map<string, Declaration>();
|
||||
const unusedLineIndices: number[] = [];
|
||||
|
||||
// for stability
|
||||
const sortedLineKeys = [...lines.keys()].sort((a, b) => (lines[a] < lines[b] ? -1 : lines[a] > lines[b] ? 1 : 0));
|
||||
|
||||
for (const i of sortedLineKeys) {
|
||||
const line = lines[i];
|
||||
|
||||
if (line === "// @sortImports") {
|
||||
lines[i] = "";
|
||||
continue;
|
||||
}
|
||||
|
||||
const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/;
|
||||
const match = line.match(inlineDeclPattern);
|
||||
|
||||
if (!match) continue;
|
||||
|
||||
const name = match[1];
|
||||
const value = match[2];
|
||||
|
||||
// Skip if the previous line has a doc comment
|
||||
const prevLine = lines[i - 1] ?? "";
|
||||
if (prevLine.startsWith("///")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip unused declarations (non-public declarations that appear only once)
|
||||
if (config.removeUnused && !line.includes("pub ")) {
|
||||
const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const expectedCount = (line.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length;
|
||||
const actualCount = (fileContents.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length;
|
||||
if (expectedCount === actualCount) {
|
||||
// unused decl
|
||||
unusedLineIndices.push(i);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (!config.includePub && line.includes("pub ")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
declarations.set(name, {
|
||||
whole: line,
|
||||
index: i,
|
||||
key: name,
|
||||
value,
|
||||
segments: parseSegments(value),
|
||||
});
|
||||
}
|
||||
|
||||
return { declarations, unusedLineIndices };
|
||||
}
|
||||

// Validate if a segment is a valid identifier
function isValidSegment(segment: string): boolean {
  if (segment.startsWith("@import(") || segment === "@This()") {
    return true;
  }
  return segment.match(/^[a-zA-Z0-9_]+$/) != null;
}

// Parse import path segments from a value
function parseSegments(value: string): null | string[] {
  if (value.startsWith("@import(")) {
    const rightBracketIndex = value.indexOf(")");
    if (rightBracketIndex === -1) return null;

    const importPart = value.slice(0, rightBracketIndex + 1);
    const remainingPart = value.slice(rightBracketIndex + 1);

    if (remainingPart.startsWith(".")) {
      const segments = remainingPart.slice(1).split(".");
      if (!segments.every(segment => isValidSegment(segment))) return null;
      return [importPart, ...segments];
    } else if (remainingPart === "") {
      return [importPart];
    } else {
      return null;
    }
  } else {
    const segments = value.split(".");
    if (!segments.every(segment => isValidSegment(segment))) return null;
    return segments;
  }
}

// Resolve the first segment of an import path
function resolveFirstSegment(firstSegment: string, declarations: Map<string, Declaration>): null | string[] {
  if (firstSegment.startsWith("@import(") || firstSegment.startsWith("@This()")) {
    return [firstSegment];
  } else {
    const declaration = declarations.get(firstSegment);
    if (!declaration) {
      return null; // Unknown declaration
    }

    const subFirstSegment = declaration.segments?.[0];
    if (!subFirstSegment) {
      return null; // Invalid declaration
    }

    const resolvedSubFirst = resolveFirstSegment(subFirstSegment, declarations);
    if (!resolvedSubFirst) {
      return null; // Unable to resolve
    }

    return [...resolvedSubFirst, ...(declaration.segments?.slice(1) ?? [])];
  }
}
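
// Worked example (added for this writeup; not part of the original script):
// given a Zig file containing
//
//   const bun = @import("bun");    // segments: ['@import("bun")']
//   const js_ast = bun.js_ast;     // segments: ["bun", "js_ast"]
//   const Expr = js_ast.Expr;      // segments: ["js_ast", "Expr"]
//
// resolveFirstSegment("js_ast", decls) follows the alias chain back to the
// @import(...) root and returns ['@import("bun")', "js_ast"]. The full path
// for Expr is then ['@import("bun")', "js_ast", "Expr"], so the grouping code
// below files it under the group key '@import("bun").js_ast'.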

type Group = {
  keySegments: string[];
  declarations: Declaration[];
};

// Group declarations by their import paths
function groupDeclarationsByImportPath(declarations: Map<string, Declaration>): Map<string, Group> {
  const groups = new Map<string, Group>();

  for (const declaration of declarations.values()) {
    if (!declaration.segments || declaration.segments.length < 1) {
      continue;
    }

    const firstSegment = declaration.segments[0];
    const resolvedFirst = resolveFirstSegment(firstSegment, declarations);

    if (!resolvedFirst) {
      continue;
    }

    const remainingSegments = declaration.segments.slice(1);
    const fullPath = [...resolvedFirst, ...remainingSegments];
    const lastSegment = fullPath.pop();

    if (!lastSegment) {
      continue;
    }

    const groupKey = fullPath.join(".");
    if (!groups.has(groupKey)) {
      groups.set(groupKey, { keySegments: fullPath, declarations: [] });
    }

    groups.get(groupKey)!.declarations.push(declaration);
    declaration.last = lastSegment;
    declaration.wholepath = [...fullPath, lastSegment];
  }

  return groups;
}

// Merge single-item groups into their parent groups
function mergeSingleItemGroups(groups: Map<string, Group>): void {
  while (true) {
    let hasChanges = false;

    for (const [groupKey, group] of groups.entries()) {
      if (group.declarations.length === 1) {
        const gcsplit = [...group.keySegments];
        while (gcsplit.pop()) {
          const parentKey = gcsplit.join(".");
          if (groups.has(parentKey)) {
            groups.get(parentKey)!.declarations.push(group.declarations[0]);
            groups.delete(groupKey);
            hasChanges = true;
            break;
          }
        }
      }
    }

    if (!hasChanges) break;
  }
}

// Move items with child groups to the top of those child groups
function promoteItemsWithChildGroups(groups: Map<string, Group>): void {
  for (const [groupKey, group] of groups.entries()) {
    for (let i = 0; i < group.declarations.length; ) {
      const item = group.declarations[i];
      const childGroupKey = (groupKey ? groupKey + "." : "") + item.last;

      if (groups.has(childGroupKey)) {
        groups.get(childGroupKey)!.declarations.unshift(item);
        group.declarations.splice(i, 1);
      } else {
        i++;
      }
    }
  }
}

// Sort groups and their declarations
function sortGroupsAndDeclarations(groups: Map<string, Group>): string[] {
  // Sort declarations within each group
  for (const group of groups.values()) {
    group.declarations.sort((a, b) => {
      if (a.wholepath?.length !== b.wholepath?.length) {
        return (a.wholepath?.length ?? 0) - (b.wholepath?.length ?? 0);
      }
      return a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
    });
  }

  // Sort group keys alphabetically
  return Array.from(groups.keys()).sort((a, b) => {
    return a < b ? -1 : a > b ? 1 : 0;
  });
}

// Generate the sorted output
function generateSortedOutput(lines: string[], groups: Map<string, Group>, sortedGroupKeys: string[]): string[] {
  const outputLines = [...lines];
  outputLines.push("");
  outputLines.push("// @sortImports");

  for (const groupKey of sortedGroupKeys) {
    const groupDeclarations = groups.get(groupKey)!;
    if (!groupDeclarations?.declarations.length) continue;

    // Add spacing between groups
    outputLines.push("");

    // Add declarations to output and mark original lines for removal
    for (const declaration of groupDeclarations.declarations) {
      outputLines.push(declaration.whole);
      outputLines[declaration.index] = "";
    }
  }

  return outputLines;
}

// Main execution function for a single file
async function processFile(filePath: string): Promise<void> {
  const originalFileContents = await Bun.file(filePath).text();
  let fileContents = originalFileContents;

  if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) {
    return;
  }
  console.log(`Processing: ${filePath}`);

  let needsRecurse = true;
  while (needsRecurse) {
    needsRecurse = false;

    const lines = fileContents.split("\n");

    const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents);
    const groups = groupDeclarationsByImportPath(declarations);

    promoteItemsWithChildGroups(groups);
    mergeSingleItemGroups(groups);
    const sortedGroupKeys = sortGroupsAndDeclarations(groups);

    const sortedLines = generateSortedOutput(lines, groups, sortedGroupKeys);

    // Remove unused declarations
    if (config.removeUnused) {
      for (const line of unusedLineIndices) {
        sortedLines[line] = "";
        needsRecurse = true;
      }
    }
    fileContents = sortedLines.join("\n");
  }

  // Remove any leading newlines
  fileContents = fileContents.replace(/^\n+/, "");

  // Maximum of one empty line
  fileContents = fileContents.replace(/\n\n+/g, "\n\n");

  // Ensure exactly one trailing newline
  fileContents = fileContents.replace(/\s*$/, "\n");

  // If the file is empty, remove the trailing newline
  if (fileContents === "\n") fileContents = "";

  if (fileContents === originalFileContents) {
    console.log(`✓ No changes: ${filePath}`);
    return;
  }

  // Write the sorted file
  await Bun.write(filePath, fileContents);

  console.log(`✓ Done: ${filePath}`);
}

// Process all files
async function main() {
  let successCount = 0;
  let errorCount = 0;

  for (const filePath of filePaths) {
    const stat = await Bun.file(filePath).stat();
    if (stat.isDirectory()) {
      const files = readdirSync(filePath, { recursive: true });
      for (const file of files) {
        if (typeof file !== "string" || !file.endsWith(".zig")) continue;
        try {
          await processFile(path.join(filePath, file));
          successCount++;
        } catch (error) {
          errorCount++;
          // Log the actual file that failed, not just its parent directory
          console.error(`Failed to process ${path.join(filePath, file)}`);
        }
      }
      continue;
    }

    try {
      await processFile(filePath);
      successCount++;
    } catch (error) {
      errorCount++;
      console.error(`Failed to process ${filePath}`);
    }
  }

  console.log(`\nSummary: ${successCount} files processed successfully, ${errorCount} errors`);

  if (errorCount > 0) {
    process.exit(1);
  }
}

main();
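
To make the segment parsing above concrete, here is a minimal, self-contained TypeScript sketch of the same logic (a standalone copy written for this writeup; the script's own functions are the ones defined above), with expected outputs in comments:

// Standalone demo of the segment parsing (illustrative copy, not the script itself).
function isValidSegmentDemo(segment: string): boolean {
  if (segment.startsWith("@import(") || segment === "@This()") return true;
  return /^[a-zA-Z0-9_]+$/.test(segment);
}

function parseSegmentsDemo(value: string): null | string[] {
  if (value.startsWith("@import(")) {
    const end = value.indexOf(")");
    if (end === -1) return null;
    const importPart = value.slice(0, end + 1);
    const rest = value.slice(end + 1);
    if (rest === "") return [importPart];
    if (!rest.startsWith(".")) return null;
    const segments = rest.slice(1).split(".");
    return segments.every(isValidSegmentDemo) ? [importPart, ...segments] : null;
  }
  const segments = value.split(".");
  return segments.every(isValidSegmentDemo) ? segments : null;
}

console.log(parseSegmentsDemo('@import("bun")'));                     // [ '@import("bun")' ]
console.log(parseSegmentsDemo("bun.js_ast.Expr"));                    // [ 'bun', 'js_ast', 'Expr' ]
console.log(parseSegmentsDemo('@import("std").heap.page_allocator')); // [ '@import("std")', 'heap', 'page_allocator' ]
console.log(parseSegmentsDemo("foo(bar)"));                           // null (not a plain import path)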
@@ -16,7 +16,7 @@ import {
 } from "node:fs";
 import { connect } from "node:net";
 import { hostname, homedir as nodeHomedir, tmpdir as nodeTmpdir, release, userInfo } from "node:os";
-import { dirname, join, relative, resolve } from "node:path";
+import { basename, dirname, join, relative, resolve } from "node:path";
 import { normalize as normalizeWindows } from "node:path/win32";

 export const isWindows = process.platform === "win32";
@@ -1370,13 +1370,16 @@ export async function getLastSuccessfulBuild() {
 }

 /**
- * @param {string} filename
- * @param {string} [cwd]
+ * @param {string} filename Absolute path to file to upload
  */
-export async function uploadArtifact(filename, cwd) {
+export async function uploadArtifact(filename) {
   if (isBuildkite) {
-    const relativePath = relative(cwd ?? process.cwd(), filename);
-    await spawnSafe(["buildkite-agent", "artifact", "upload", relativePath], { cwd, stdio: "inherit" });
+    await spawnSafe(["buildkite-agent", "artifact", "upload", basename(filename)], {
+      cwd: dirname(filename),
+      stdio: "inherit",
+    });
   } else {
     console.warn(`not in buildkite. artifact ${filename} not uploaded.`);
   }
 }

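A plausible reading of this change (an assumption; the commit message is not shown here): buildkite-agent records an uploaded artifact under the path it is given, so uploading just the basename from within dirname(filename) stores a flat artifact name rather than a long relative path, which also makes the old cwd parameter unnecessary.

// Hypothetical before/after (illustrative values only, not from the commit):
// before: uploadArtifact("/work/build/bun.zip", "/work")  -> artifact named "build/bun.zip"
// after:  uploadArtifact("/work/build/bun.zip")           -> artifact named "bun.zip"
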
@@ -2840,6 +2843,20 @@ export function printEnvironment() {
         spawnSync([shell, "-c", "ulimit -a"], { stdio: "inherit" });
       }
     });
+    startGroup("Disk (df)", () => {
+      const shell = which(["sh", "bash"]);
+      if (shell) {
+        spawnSync([shell, "-c", "df"], { stdio: "inherit" });
+      }
+    });
   }
+  if (isWindows) {
+    startGroup("Disk (win)", () => {
+      const shell = which(["pwsh"]);
+      if (shell) {
+        spawnSync([shell, "-c", "get-psdrive"], { stdio: "inherit" });
+      }
+    });
+  }
 }

@@ -97,6 +97,12 @@ pub const StandaloneModuleGraph = struct {
         encoding: Encoding = .latin1,
         loader: bun.options.Loader = .file,
         module_format: ModuleFormat = .none,
+        side: FileSide = .server,
     };

+    pub const FileSide = enum(u8) {
+        server = 0,
+        client = 1,
+    };
+
     pub const Encoding = enum(u8) {
@@ -141,6 +147,11 @@ pub const StandaloneModuleGraph = struct {
         wtf_string: bun.String = bun.String.empty,
         bytecode: []u8 = "",
         module_format: ModuleFormat = .none,
+        side: FileSide = .server,
+
+        pub fn appearsInEmbeddedFilesArray(this: *const File) bool {
+            return this.side == .client or !this.loader.isJavaScriptLike();
+        }

         pub fn stat(this: *const File) bun.Stat {
             var result = std.mem.zeroes(bun.Stat);
@@ -226,6 +237,7 @@ pub const StandaloneModuleGraph = struct {
             null,
             std.math.maxInt(i32),
             std.math.maxInt(i32),
+            .{},
         )) {
             .success => |x| x,
             .fail => {
@@ -251,7 +263,7 @@ pub const StandaloneModuleGraph = struct {
         });

         stored.external_source_names = file_names;
-        stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)), .load_hint = .none };
+        stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)), .load_hint = .none, .kind = .zig };
         stored.is_standalone_module_graph = true;

         const parsed = bun.new(SourceMap.ParsedSourceMap, stored);
@@ -300,6 +312,7 @@ pub const StandaloneModuleGraph = struct {
                     .none,
                 .bytecode = if (module.bytecode.length > 0) @constCast(sliceTo(raw_bytes, module.bytecode)) else &.{},
                 .module_format = module.module_format,
+                .side = module.side,
             },
         );
     }
@@ -347,8 +360,10 @@ pub const StandaloneModuleGraph = struct {
                 string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256;
             } else {
                 if (entry_point_id == null) {
-                    if (output_file.output_kind == .@"entry-point") {
-                        entry_point_id = module_count;
+                    if (output_file.side == null or output_file.side.? == .server) {
+                        if (output_file.output_kind == .@"entry-point") {
+                            entry_point_id = module_count;
+                        }
                     }
                 }

@@ -421,6 +436,10 @@ pub const StandaloneModuleGraph = struct {
                 else => .none,
             } else .none,
             .bytecode = bytecode,
+            .side = switch (output_file.side orelse .server) {
+                .server => .server,
+                .client => .client,
+            },
         };

         if (output_file.source_map_index != std.math.maxInt(u32)) {
@@ -839,7 +858,7 @@ pub const StandaloneModuleGraph = struct {
             .fromStdDir(root_dir),
             bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0),
         ) catch |err| {
-            if (err == error.IsDir) {
+            if (err == error.IsDir or err == error.EISDIR) {
                 Output.prettyErrorln("<r><red>error<r><d>:<r> {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)});
             } else {
                 Output.prettyErrorln("<r><red>error<r><d>:<r> failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) });

@@ -307,7 +307,7 @@ fn appendFileAssumeCapacity(
     loader: options.Loader,
     parent_hash: HashType,
     package_json: ?*PackageJSON,
-    comptime copy_file_path: bool,
+    comptime clone_file_path: bool,
 ) bun.JSC.Maybe(void) {
     if (comptime Environment.isWindows) {
         // on windows we can only watch items that are in the directory tree of the top level dir
@@ -320,7 +320,7 @@ fn appendFileAssumeCapacity(

     const watchlist_id = this.watchlist.len;

-    const file_path_: string = if (comptime copy_file_path)
+    const file_path_: string = if (comptime clone_file_path)
         bun.asByteSlice(this.allocator.dupeZ(u8, file_path) catch bun.outOfMemory())
     else
         file_path;
@@ -383,13 +383,12 @@ fn appendFileAssumeCapacity(
     this.watchlist.appendAssumeCapacity(item);
     return .{ .result = {} };
 }

 fn appendDirectoryAssumeCapacity(
     this: *Watcher,
     stored_fd: bun.FileDescriptor,
     file_path: string,
     hash: HashType,
-    comptime copy_file_path: bool,
+    comptime clone_file_path: bool,
 ) bun.JSC.Maybe(WatchItemIndex) {
     if (comptime Environment.isWindows) {
         // on windows we can only watch items that are in the directory tree of the top level dir
@@ -408,13 +407,13 @@ fn appendDirectoryAssumeCapacity(
         };
     };

-    const parent_hash = getHash(bun.fs.PathName.init(file_path).dirWithTrailingSlash());
-
-    const file_path_: string = if (comptime copy_file_path)
+    const file_path_: string = if (comptime clone_file_path)
         bun.asByteSlice(this.allocator.dupeZ(u8, file_path) catch bun.outOfMemory())
     else
         file_path;

+    const parent_hash = getHash(bun.fs.PathName.init(file_path_).dirWithTrailingSlash());
+
     const watchlist_id = this.watchlist.len;

     var item = WatchItem{
@@ -464,13 +463,21 @@ fn appendDirectoryAssumeCapacity(
             null,
         );
     } else if (Environment.isLinux) {
-        const file_path_to_use_ = std.mem.trimRight(u8, file_path_, "/");
-        var buf: bun.PathBuffer = undefined;
-        bun.copy(u8, &buf, file_path_to_use_);
-        buf[file_path_to_use_.len] = 0;
-        const slice: [:0]u8 = buf[0..file_path_to_use_.len :0];
-        item.eventlist_index = switch (this.platform.watchDir(slice)) {
-            .err => |err| return .{ .err = err },
+        const buf = bun.path_buffer_pool.get();
+        defer {
+            bun.path_buffer_pool.put(buf);
+        }
+        const path: [:0]const u8 = if (clone_file_path and file_path_.len > 0 and file_path_[file_path_.len - 1] == 0)
+            file_path_[0 .. file_path_.len - 1 :0]
+        else brk: {
+            const trailing_slash = if (file_path_.len > 1) std.mem.trimRight(u8, file_path_, &.{ 0, '/' }) else file_path_;
+            @memcpy(buf[0..trailing_slash.len], trailing_slash);
+            buf[trailing_slash.len] = 0;
+            break :brk buf[0..trailing_slash.len :0];
+        };
+
+        item.eventlist_index = switch (this.platform.watchDir(path)) {
+            .err => |err| return .{ .err = err.withPath(file_path) },
             .result => |r| r,
         };
     }
@@ -491,7 +498,7 @@ pub fn appendFileMaybeLock(
     loader: options.Loader,
     dir_fd: bun.FileDescriptor,
     package_json: ?*PackageJSON,
-    comptime copy_file_path: bool,
+    comptime clone_file_path: bool,
     comptime lock: bool,
 ) bun.JSC.Maybe(void) {
     if (comptime lock) this.mutex.lock();
@@ -524,8 +531,8 @@ pub fn appendFileMaybeLock(
     this.watchlist.ensureUnusedCapacity(this.allocator, 1 + @as(usize, @intCast(@intFromBool(parent_watch_item == null)))) catch bun.outOfMemory();

     if (autowatch_parent_dir) {
-        parent_watch_item = parent_watch_item orelse switch (this.appendDirectoryAssumeCapacity(dir_fd, parent_dir, parent_dir_hash, copy_file_path)) {
-            .err => |err| return .{ .err = err },
+        parent_watch_item = parent_watch_item orelse switch (this.appendDirectoryAssumeCapacity(dir_fd, parent_dir, parent_dir_hash, clone_file_path)) {
+            .err => |err| return .{ .err = err.withPath(parent_dir) },
             .result => |r| r,
         };
     }
@@ -537,9 +544,9 @@ pub fn appendFileMaybeLock(
         loader,
         parent_dir_hash,
         package_json,
-        copy_file_path,
+        clone_file_path,
     )) {
-        .err => |err| return .{ .err = err },
+        .err => |err| return .{ .err = err.withPath(file_path) },
         .result => {},
     }

@@ -568,9 +575,9 @@ pub fn appendFile(
     loader: options.Loader,
     dir_fd: bun.FileDescriptor,
     package_json: ?*PackageJSON,
-    comptime copy_file_path: bool,
+    comptime clone_file_path: bool,
 ) bun.JSC.Maybe(void) {
-    return appendFileMaybeLock(this, fd, file_path, hash, loader, dir_fd, package_json, copy_file_path, true);
+    return appendFileMaybeLock(this, fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, true);
 }

 pub fn addDirectory(
@@ -578,7 +585,7 @@ pub fn addDirectory(
     fd: bun.FileDescriptor,
     file_path: string,
     hash: HashType,
-    comptime copy_file_path: bool,
+    comptime clone_file_path: bool,
 ) bun.JSC.Maybe(WatchItemIndex) {
     this.mutex.lock();
     defer this.mutex.unlock();
@@ -589,7 +596,7 @@ pub fn addDirectory(

     this.watchlist.ensureUnusedCapacity(this.allocator, 1) catch bun.outOfMemory();

-    return this.appendDirectoryAssumeCapacity(fd, file_path, hash, copy_file_path);
+    return this.appendDirectoryAssumeCapacity(fd, file_path, hash, clone_file_path);
 }

 pub fn addFile(
@@ -600,7 +607,7 @@ pub fn addFile(
     loader: options.Loader,
     dir_fd: bun.FileDescriptor,
     package_json: ?*PackageJSON,
-    comptime copy_file_path: bool,
+    comptime clone_file_path: bool,
 ) bun.JSC.Maybe(void) {
     // This must lock due to concurrent transpiler
     this.mutex.lock();
@@ -617,7 +624,7 @@ pub fn addFile(
         return .{ .result = {} };
     }

-    return this.appendFileMaybeLock(fd, file_path, hash, loader, dir_fd, package_json, copy_file_path, false);
+    return this.appendFileMaybeLock(fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, false);
 }

 pub fn indexOf(this: *Watcher, hash: HashType) ?u32 {

@@ -2,7 +2,7 @@
 //! It also allows measuring how much memory a scope has allocated.
 const AllocationScope = @This();

-pub const enabled = bun.Environment.isDebug;
+pub const enabled = bun.Environment.enableAllocScopes;

 parent: Allocator,
 state: if (enabled) struct {
@@ -36,7 +36,7 @@ pub const Extra = union(enum) {
 };

 pub fn init(parent: Allocator) AllocationScope {
-    return if (enabled)
+    return if (comptime enabled)
         .{
             .parent = parent,
             .state = .{
@@ -52,7 +52,7 @@ pub fn init(parent: Allocator) AllocationScope {
 }

 pub fn deinit(scope: *AllocationScope) void {
-    if (enabled) {
+    if (comptime enabled) {
         scope.state.mutex.lock();
         defer scope.state.allocations.deinit(scope.parent);
         const count = scope.state.allocations.count();
@@ -83,7 +83,7 @@ pub fn deinit(scope: *AllocationScope) void {
 }

 pub fn allocator(scope: *AllocationScope) Allocator {
-    return if (enabled) .{ .ptr = scope, .vtable = &vtable } else scope.parent;
+    return if (comptime enabled) .{ .ptr = scope, .vtable = &vtable } else scope.parent;
 }

 const vtable: Allocator.VTable = .{
@@ -176,7 +176,7 @@ fn trackFreeAssumeLocked(scope: *AllocationScope, buf: []const u8, ret_addr: usi
 }

 pub fn assertOwned(scope: *AllocationScope, ptr: anytype) void {
-    if (!enabled) return;
+    if (comptime !enabled) return;
     const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
         .c, .one, .many => ptr,
         .slice => if (ptr.len > 0) ptr.ptr else return,
@@ -188,7 +188,7 @@ pub fn assertOwned(scope: *AllocationScope, ptr: anytype) void {
 }

 pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void {
-    if (!enabled) return;
+    if (comptime !enabled) return;
     const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
         .c, .one, .many => ptr,
         .slice => if (ptr.len > 0) ptr.ptr else return,
@@ -196,7 +196,7 @@ pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void {
     scope.state.mutex.lock();
     defer scope.state.mutex.unlock();
     if (scope.state.allocations.getPtr(cast_ptr)) |owned| {
-        Output.debugWarn("Pointer allocated here:");
+        Output.warn("Owned pointer allocated here:");
         bun.crash_handler.dumpStackTrace(owned.allocated_at.trace(), trace_limits, trace_limits);
     }
     @panic("this pointer was owned by the allocation scope when it was not supposed to be");
@@ -205,7 +205,7 @@ pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void {
 /// Track an arbitrary pointer. Extra data can be stored in the allocation,
 /// which will be printed when a leak is detected.
 pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_addr: ?usize, extra: Extra) void {
-    if (!enabled) return;
+    if (comptime !enabled) return;
     scope.state.mutex.lock();
     defer scope.state.mutex.unlock();
     scope.state.allocations.ensureUnusedCapacity(scope.parent, 1) catch bun.outOfMemory();
@@ -214,15 +214,29 @@ pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_add

 /// Call when the pointer from `trackExternalAllocation` is freed.
 /// Returns true if the free was invalid.
-pub fn trackExternalFree(scope: *AllocationScope, ptr: []const u8, ret_addr: ?usize) bool {
-    if (!enabled) return;
+pub fn trackExternalFree(scope: *AllocationScope, slice: anytype, ret_addr: ?usize) bool {
+    if (comptime !enabled) return;
+    const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) {
+        .pointer => |p| switch (p.size) {
+            .slice => brk: {
+                if (p.child != u8) @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice)));
+                if (p.sentinel_ptr == null) break :brk slice;
+                // Ensure we include the sentinel value
+                break :brk slice[0 .. slice.len + 1];
+            },
+            else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))),
+        },
+        else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))),
+    };
+    // Empty slice usually means invalid pointer
+    if (ptr.len == 0) return false;
     scope.state.mutex.lock();
     defer scope.state.mutex.unlock();
     return trackFreeAssumeLocked(scope, ptr, ret_addr orelse @returnAddress());
 }

 pub fn setPointerExtra(scope: *AllocationScope, ptr: *anyopaque, extra: Extra) void {
-    if (!enabled) return;
+    if (comptime !enabled) return;
     scope.state.mutex.lock();
     defer scope.state.mutex.unlock();
     const allocation = scope.state.allocations.getPtr(ptr) orelse

@@ -93,6 +93,8 @@ pub const Features = struct {
     pub var loaders: usize = 0;
     pub var lockfile_migration_from_package_lock: usize = 0;
     pub var text_lockfile: usize = 0;
+    pub var isolated_bun_install: usize = 0;
+    pub var hoisted_bun_install: usize = 0;
     pub var macros: usize = 0;
     pub var no_avx2: usize = 0;
     pub var no_avx: usize = 0;

src/ast/ASTMemoryAllocator.zig (new file, 96 lines)
@@ -0,0 +1,96 @@
const SFA = std.heap.StackFallbackAllocator(@min(8192, std.heap.page_size_min));

stack_allocator: SFA = undefined,
bump_allocator: std.mem.Allocator = undefined,
allocator: std.mem.Allocator,
previous: ?*ASTMemoryAllocator = null,

pub fn enter(this: *ASTMemoryAllocator, allocator: std.mem.Allocator) ASTMemoryAllocator.Scope {
    this.allocator = allocator;
    this.stack_allocator = SFA{
        .buffer = undefined,
        .fallback_allocator = allocator,
        .fixed_buffer_allocator = undefined,
    };
    this.bump_allocator = this.stack_allocator.get();
    this.previous = null;
    var ast_scope = ASTMemoryAllocator.Scope{
        .current = this,
        .previous = Stmt.Data.Store.memory_allocator,
    };
    ast_scope.enter();
    return ast_scope;
}
pub const Scope = struct {
    current: ?*ASTMemoryAllocator = null,
    previous: ?*ASTMemoryAllocator = null,

    pub fn enter(this: *@This()) void {
        bun.debugAssert(Expr.Data.Store.memory_allocator == Stmt.Data.Store.memory_allocator);

        this.previous = Expr.Data.Store.memory_allocator;

        const current = this.current;

        Expr.Data.Store.memory_allocator = current;
        Stmt.Data.Store.memory_allocator = current;

        if (current == null) {
            Stmt.Data.Store.begin();
            Expr.Data.Store.begin();
        }
    }

    pub fn exit(this: *const @This()) void {
        Expr.Data.Store.memory_allocator = this.previous;
        Stmt.Data.Store.memory_allocator = this.previous;
    }
};

pub fn reset(this: *ASTMemoryAllocator) void {
    this.stack_allocator = SFA{
        .buffer = undefined,
        .fallback_allocator = this.allocator,
        .fixed_buffer_allocator = undefined,
    };
    this.bump_allocator = this.stack_allocator.get();
}

pub fn push(this: *ASTMemoryAllocator) void {
    Stmt.Data.Store.memory_allocator = this;
    Expr.Data.Store.memory_allocator = this;
}

pub fn pop(this: *ASTMemoryAllocator) void {
    const prev = this.previous;
    bun.assert(prev != this);
    Stmt.Data.Store.memory_allocator = prev;
    Expr.Data.Store.memory_allocator = prev;
    this.previous = null;
}

pub fn append(this: ASTMemoryAllocator, comptime ValueType: type, value: anytype) *ValueType {
    const ptr = this.bump_allocator.create(ValueType) catch unreachable;
    ptr.* = value;
    return ptr;
}

/// Initialize ASTMemoryAllocator as `undefined`, and call this.
pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) void {
    this.stack_allocator = SFA{
        .buffer = undefined,
        .fallback_allocator = arena,
        .fixed_buffer_allocator = .init(&.{}),
    };
    this.bump_allocator = this.stack_allocator.get();
}

// @sortImports

const bun = @import("bun");
const std = @import("std");

const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const Expr = js_ast.Expr;
const Stmt = js_ast.Stmt;

src/ast/Ast.zig (new file, 143 lines)
@@ -0,0 +1,143 @@
pub const TopLevelSymbolToParts = std.ArrayHashMapUnmanaged(Ref, BabyList(u32), Ref.ArrayHashCtx, false);

approximate_newline_count: usize = 0,
has_lazy_export: bool = false,
runtime_imports: Runtime.Imports = .{},

nested_scope_slot_counts: SlotCounts = SlotCounts{},

runtime_import_record_id: ?u32 = null,
needs_runtime: bool = false,
// This is a list of CommonJS features. When a file uses CommonJS features,
// it's not a candidate for "flat bundling" and must be wrapped in its own
// closure.
has_top_level_return: bool = false,
uses_exports_ref: bool = false,
uses_module_ref: bool = false,
uses_require_ref: bool = false,
commonjs_module_exports_assigned_deoptimized: bool = false,

force_cjs_to_esm: bool = false,
exports_kind: ExportsKind = ExportsKind.none,

// This is a list of ES6 features. They are ranges instead of booleans so
// that they can be used in log messages. Check to see if "Len > 0".
import_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax or "import()"
export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax
top_level_await_keyword: logger.Range = logger.Range.None,

/// These are stored at the AST level instead of on individual AST nodes so
/// they can be manipulated efficiently without a full AST traversal
import_records: ImportRecord.List = .{},

hashbang: string = "",
directive: ?string = null,
parts: Part.List = Part.List{},
// This list may be mutated later, so we should store the capacity
symbols: Symbol.List = Symbol.List{},
module_scope: Scope = Scope{},
char_freq: ?CharFreq = null,
exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
/// When using format .bake_internal_dev, this is the HMR variable instead
/// of the wrapper. This is because that format does not store module
/// wrappers in a variable.
wrapper_ref: Ref = Ref.None,
require_ref: Ref = Ref.None,

// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
named_imports: NamedImports = .{},
named_exports: NamedExports = .{},
export_star_import_records: []u32 = &([_]u32{}),

// allocator: std.mem.Allocator,
top_level_symbols_to_parts: TopLevelSymbolToParts = .{},

commonjs_named_exports: CommonJSNamedExports = .{},

redirect_import_record_index: ?u32 = null,

/// Only populated when bundling
target: bun.options.Target = .browser,
// const_values: ConstValuesMap = .{},
ts_enums: TsEnumsMap = .{},

/// Not to be confused with `commonjs_named_exports`
/// This is a list of named exports that may exist in a CommonJS module
/// We use this with `commonjs_at_runtime` to re-export CommonJS
has_commonjs_export_names: bool = false,
import_meta_ref: Ref = Ref.None,

pub const CommonJSNamedExport = struct {
    loc_ref: LocRef,
    needs_decl: bool = true,
};
pub const CommonJSNamedExports = bun.StringArrayHashMapUnmanaged(CommonJSNamedExport);

pub const NamedImports = std.ArrayHashMapUnmanaged(Ref, NamedImport, RefHashCtx, true);
pub const NamedExports = bun.StringArrayHashMapUnmanaged(NamedExport);
pub const ConstValuesMap = std.ArrayHashMapUnmanaged(Ref, Expr, RefHashCtx, false);
pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(InlinedEnumValue), RefHashCtx, false);

pub fn fromParts(parts: []Part) Ast {
    return Ast{
        .parts = Part.List.init(parts),
        .runtime_imports = .{},
    };
}

pub fn initTest(parts: []Part) Ast {
    return Ast{
        .parts = Part.List.init(parts),
        .runtime_imports = .{},
    };
}

pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{} };

pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
    const opts = std.json.StringifyOptions{ .whitespace = std.json.StringifyOptions.Whitespace{
        .separator = true,
    } };
    try std.json.stringify(self.parts, opts, stream);
}

/// Do not call this if it wasn't globally allocated!
pub fn deinit(this: *Ast) void {
    // TODO: assert mimalloc-owned memory
    if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
    if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
    if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}

// @sortImports

const std = @import("std");
const Runtime = @import("../runtime.zig").Runtime;

const bun = @import("bun");
const BabyList = bun.BabyList;
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.js_ast;
const Ast = js_ast.Ast;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Expr = js_ast.Expr;
const InlinedEnumValue = js_ast.InlinedEnumValue;
const LocRef = js_ast.LocRef;
const NamedExport = js_ast.NamedExport;
const NamedImport = js_ast.NamedImport;
const Part = js_ast.Part;
const Ref = js_ast.Ref;
const RefHashCtx = js_ast.RefHashCtx;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;

const G = js_ast.G;
pub const Class = G.Class;

src/ast/B.zig (new file, 106 lines)
@@ -0,0 +1,106 @@
/// B is for Binding! Bindings are on the left side of variable
/// declarations (s_local), which is how destructuring assignments
/// are represented in memory. Consider a basic example.
///
///     let hello = world;
///         ^       ^
///         |       E.Identifier
///         B.Identifier
///
/// Bindings can be nested
///
///               B.Array
///               |    B.Identifier
///               |    |
///     let { foo: [ bar ] } = ...
///         ----------------
///         B.Object
pub const B = union(Binding.Tag) {
    // let x = ...
    b_identifier: *B.Identifier,
    // let [a, b] = ...
    b_array: *B.Array,
    // let { a, b: c } = ...
    b_object: *B.Object,
    // this is used to represent array holes
    b_missing: B.Missing,

    pub const Identifier = struct {
        ref: Ref,
    };

    pub const Property = struct {
        flags: Flags.Property.Set = Flags.Property.None,
        key: ExprNodeIndex,
        value: Binding,
        default_value: ?Expr = null,
    };

    pub const Object = struct {
        properties: []B.Property,
        is_single_line: bool = false,

        pub const Property = B.Property;
    };

    pub const Array = struct {
        items: []ArrayBinding,
        has_spread: bool = false,
        is_single_line: bool = false,

        pub const Item = ArrayBinding;
    };

    pub const Missing = struct {};

    /// This hash function is currently only used for React Fast Refresh transform.
    /// This doesn't include the `is_single_line` properties, as they only affect whitespace.
    pub fn writeToHasher(b: B, hasher: anytype, symbol_table: anytype) void {
        switch (b) {
            .b_identifier => |id| {
                const original_name = id.ref.getSymbol(symbol_table).original_name;
                writeAnyToHasher(hasher, .{ std.meta.activeTag(b), original_name.len });
            },
            .b_array => |array| {
                writeAnyToHasher(hasher, .{ std.meta.activeTag(b), array.has_spread, array.items.len });
                for (array.items) |item| {
                    writeAnyToHasher(hasher, .{item.default_value != null});
                    if (item.default_value) |default| {
                        default.data.writeToHasher(hasher, symbol_table);
                    }
                    item.binding.data.writeToHasher(hasher, symbol_table);
                }
            },
            .b_object => |object| {
                writeAnyToHasher(hasher, .{ std.meta.activeTag(b), object.properties.len });
                for (object.properties) |property| {
                    writeAnyToHasher(hasher, .{ property.default_value != null, property.flags });
                    if (property.default_value) |default| {
                        default.data.writeToHasher(hasher, symbol_table);
                    }
                    property.key.data.writeToHasher(hasher, symbol_table);
                    property.value.data.writeToHasher(hasher, symbol_table);
                }
            },
            .b_missing => {},
        }
    }
};

// @sortImports

const std = @import("std");

const bun = @import("bun");
const writeAnyToHasher = bun.writeAnyToHasher;

const js_ast = bun.js_ast;
const ArrayBinding = js_ast.ArrayBinding;
const Binding = js_ast.Binding;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const Flags = js_ast.Flags;
const Ref = js_ast.Ref;

const G = js_ast.G;
pub const Class = G.Class;

src/ast/Binding.zig (new file, 165 lines)
@@ -0,0 +1,165 @@
loc: logger.Loc,
data: B,

const Serializable = struct {
    type: Tag,
    object: string,
    value: B,
    loc: logger.Loc,
};

pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
    return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "binding", .value = self.data, .loc = self.loc });
}

pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type {
    const ExprType = expr_type;
    return struct {
        context: *ExprType,
        allocator: std.mem.Allocator,
        pub const Context = @This();

        pub fn wrapIdentifier(ctx: *const Context, loc: logger.Loc, ref: Ref) Expr {
            return func_type(ctx.context, loc, ref);
        }

        pub fn init(context: *ExprType) Context {
            return Context{ .context = context, .allocator = context.allocator };
        }
    };
}

pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
    const loc = binding.loc;

    switch (binding.data) {
        .b_missing => {
            return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = loc };
        },
        .b_identifier => |b| {
            return wrapper.wrapIdentifier(loc, b.ref);
        },
        .b_array => |b| {
            var exprs = wrapper.allocator.alloc(Expr, b.items.len) catch unreachable;
            var i: usize = 0;
            while (i < exprs.len) : (i += 1) {
                const item = b.items[i];
                exprs[i] = convert: {
                    const expr = toExpr(&item.binding, wrapper);
                    if (b.has_spread and i == exprs.len - 1) {
                        break :convert Expr.init(E.Spread, E.Spread{ .value = expr }, expr.loc);
                    } else if (item.default_value) |default| {
                        break :convert Expr.assign(expr, default);
                    } else {
                        break :convert expr;
                    }
                };
            }

            return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc);
        },
        .b_object => |b| {
            const properties = wrapper
                .allocator
                .alloc(G.Property, b.properties.len) catch unreachable;
            for (properties, b.properties) |*property, item| {
                property.* = .{
                    .flags = item.flags,
                    .key = item.key,
                    .kind = if (item.flags.contains(.is_spread))
                        .spread
                    else
                        .normal,
                    .value = toExpr(&item.value, wrapper),
                    .initializer = item.default_value,
                };
            }
            return Expr.init(
                E.Object,
                E.Object{
                    .properties = G.Property.List.init(properties),
                    .is_single_line = b.is_single_line,
                },
                loc,
            );
        },
    }
}

pub const Tag = enum(u5) {
    b_identifier,
    b_array,
    b_object,
    b_missing,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }
};

pub var icount: usize = 0;

pub fn init(t: anytype, loc: logger.Loc) Binding {
    icount += 1;
    switch (@TypeOf(t)) {
        *B.Identifier => {
            return Binding{ .loc = loc, .data = B{ .b_identifier = t } };
        },
        *B.Array => {
            return Binding{ .loc = loc, .data = B{ .b_array = t } };
        },
        *B.Object => {
            return Binding{ .loc = loc, .data = B{ .b_object = t } };
        },
        B.Missing => {
            return Binding{ .loc = loc, .data = B{ .b_missing = t } };
        },
        else => {
            @compileError("Invalid type passed to Binding.init");
        },
    }
}

pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding {
    icount += 1;
    switch (@TypeOf(t)) {
        B.Identifier => {
            const data = allocator.create(B.Identifier) catch unreachable;
            data.* = t;
            return Binding{ .loc = loc, .data = B{ .b_identifier = data } };
        },
        B.Array => {
            const data = allocator.create(B.Array) catch unreachable;
            data.* = t;
            return Binding{ .loc = loc, .data = B{ .b_array = data } };
        },
        B.Object => {
            const data = allocator.create(B.Object) catch unreachable;
            data.* = t;
            return Binding{ .loc = loc, .data = B{ .b_object = data } };
        },
        B.Missing => {
            return Binding{ .loc = loc, .data = B{ .b_missing = .{} } };
        },
        else => {
            @compileError("Invalid type passed to Binding.alloc");
        },
    }
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.js_ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const G = js_ast.G;
const Ref = js_ast.Ref;

src/ast/BundledAst.zig (new file, 231 lines)
@@ -0,0 +1,231 @@
//! Like Ast but slimmer and for bundling only.
//!
//! On Linux, the hottest function in the bundler is:
//! src.multi_array_list.MultiArrayList(src.js_ast.Ast).ensureTotalCapacity
//! https://share.firefox.dev/3NNlRKt
//!
//! So we make a slimmer version of Ast for bundling that doesn't allocate as much memory

approximate_newline_count: u32 = 0,
nested_scope_slot_counts: SlotCounts = .{},

exports_kind: ExportsKind = .none,

/// These are stored at the AST level instead of on individual AST nodes so
/// they can be manipulated efficiently without a full AST traversal
import_records: ImportRecord.List = .{},

hashbang: string = "",
parts: Part.List = .{},
css: ?*bun.css.BundlerStyleSheet = null,
url_for_css: []const u8 = "",
symbols: Symbol.List = .{},
module_scope: Scope = .{},
char_freq: CharFreq = undefined,
exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
wrapper_ref: Ref = Ref.None,
require_ref: Ref = Ref.None,
top_level_await_keyword: logger.Range,
tla_check: TlaCheck = .{},

// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
named_imports: NamedImports = .{},
named_exports: NamedExports = .{},
export_star_import_records: []u32 = &.{},

top_level_symbols_to_parts: TopLevelSymbolToParts = .{},

commonjs_named_exports: CommonJSNamedExports = .{},

redirect_import_record_index: u32 = std.math.maxInt(u32),

/// Only populated when bundling. When --server-components is passed, this
/// will be .browser when it is a client component, and the server's target
/// on the server.
target: bun.options.Target = .browser,

// const_values: ConstValuesMap = .{},
ts_enums: Ast.TsEnumsMap = .{},

flags: BundledAst.Flags = .{},

pub const Flags = packed struct(u8) {
    // This is a list of CommonJS features. When a file uses CommonJS features,
    // it's not a candidate for "flat bundling" and must be wrapped in its own
    // closure.
    uses_exports_ref: bool = false,
    uses_module_ref: bool = false,
    // uses_require_ref: bool = false,
    uses_export_keyword: bool = false,
    has_char_freq: bool = false,
    force_cjs_to_esm: bool = false,
    has_lazy_export: bool = false,
    commonjs_module_exports_assigned_deoptimized: bool = false,
    has_explicit_use_strict_directive: bool = false,
};

pub const empty = BundledAst.init(Ast.empty);

pub fn toAST(this: *const BundledAst) Ast {
    return .{
        .approximate_newline_count = this.approximate_newline_count,
        .nested_scope_slot_counts = this.nested_scope_slot_counts,

        .exports_kind = this.exports_kind,

        .import_records = this.import_records,

        .hashbang = this.hashbang,
        .parts = this.parts,
        // This list may be mutated later, so we should store the capacity
        .symbols = this.symbols,
        .module_scope = this.module_scope,
        .char_freq = if (this.flags.has_char_freq) this.char_freq else null,
        .exports_ref = this.exports_ref,
        .module_ref = this.module_ref,
        .wrapper_ref = this.wrapper_ref,
        .require_ref = this.require_ref,
        .top_level_await_keyword = this.top_level_await_keyword,

        // These are used when bundling. They are filled in during the parser pass
        // since we already have to traverse the AST then anyway and the parser pass
        // is conveniently fully parallelized.
        .named_imports = this.named_imports,
        .named_exports = this.named_exports,
        .export_star_import_records = this.export_star_import_records,

        .top_level_symbols_to_parts = this.top_level_symbols_to_parts,

        .commonjs_named_exports = this.commonjs_named_exports,

        .redirect_import_record_index = this.redirect_import_record_index,

        .target = this.target,

        // .const_values = this.const_values,
        .ts_enums = this.ts_enums,

        .uses_exports_ref = this.flags.uses_exports_ref,
        .uses_module_ref = this.flags.uses_module_ref,
        // .uses_require_ref = ast.uses_require_ref,
        .export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .{} },
        .force_cjs_to_esm = this.flags.force_cjs_to_esm,
        .has_lazy_export = this.flags.has_lazy_export,
        .commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,
        .directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null,
    };
}

pub fn init(ast: Ast) BundledAst {
    return .{
        .approximate_newline_count = @as(u32, @truncate(ast.approximate_newline_count)),
        .nested_scope_slot_counts = ast.nested_scope_slot_counts,

        .exports_kind = ast.exports_kind,

        .import_records = ast.import_records,

        .hashbang = ast.hashbang,
        .parts = ast.parts,
        // This list may be mutated later, so we should store the capacity
        .symbols = ast.symbols,
        .module_scope = ast.module_scope,
        .char_freq = ast.char_freq orelse undefined,
        .exports_ref = ast.exports_ref,
        .module_ref = ast.module_ref,
        .wrapper_ref = ast.wrapper_ref,
        .require_ref = ast.require_ref,
        .top_level_await_keyword = ast.top_level_await_keyword,
        // These are used when bundling. They are filled in during the parser pass
        // since we already have to traverse the AST then anyway and the parser pass
        // is conveniently fully parallelized.
        .named_imports = ast.named_imports,
        .named_exports = ast.named_exports,
        .export_star_import_records = ast.export_star_import_records,

        // .allocator = ast.allocator,
        .top_level_symbols_to_parts = ast.top_level_symbols_to_parts,

        .commonjs_named_exports = ast.commonjs_named_exports,

        .redirect_import_record_index = ast.redirect_import_record_index orelse std.math.maxInt(u32),

        .target = ast.target,

        // .const_values = ast.const_values,
        .ts_enums = ast.ts_enums,

        .flags = .{
            .uses_exports_ref = ast.uses_exports_ref,
            .uses_module_ref = ast.uses_module_ref,
            // .uses_require_ref = ast.uses_require_ref,
            .uses_export_keyword = ast.export_keyword.len > 0,
            .has_char_freq = ast.char_freq != null,
            .force_cjs_to_esm = ast.force_cjs_to_esm,
            .has_lazy_export = ast.has_lazy_export,
            .commonjs_module_exports_assigned_deoptimized = ast.commonjs_module_exports_assigned_deoptimized,
            .has_explicit_use_strict_directive = strings.eqlComptime(ast.directive orelse "", "use strict"),
        },
    };
}

/// TODO: Move this from being done on all parse tasks into the start of the linker. This currently allocates base64 encoding for every small file loaded thing.
pub fn addUrlForCss(
    this: *BundledAst,
    allocator: std.mem.Allocator,
    source: *const logger.Source,
    mime_type_: ?[]const u8,
    unique_key: ?[]const u8,
) void {
    {
        const mime_type = if (mime_type_) |m| m else MimeType.byExtension(bun.strings.trimLeadingChar(std.fs.path.extension(source.path.text), '.')).value;
        const contents = source.contents;
        // TODO: make this configurable
        const COPY_THRESHOLD = 128 * 1024; // 128kb
        const should_copy = contents.len >= COPY_THRESHOLD and unique_key != null;
        if (should_copy) return;
        this.url_for_css = url_for_css: {
            // Encode as base64
            const encode_len = bun.base64.encodeLen(contents);
            const data_url_prefix_len = "data:".len + mime_type.len + ";base64,".len;
            const total_buffer_len = data_url_prefix_len + encode_len;
            var encoded = allocator.alloc(u8, total_buffer_len) catch bun.outOfMemory();
            _ = std.fmt.bufPrint(encoded[0..data_url_prefix_len], "data:{s};base64,", .{mime_type}) catch unreachable;
            const len = bun.base64.encode(encoded[data_url_prefix_len..], contents);
            break :url_for_css encoded[0 .. data_url_prefix_len + len];
        };
    }
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const MimeType = bun.http.MimeType;

const js_ast = bun.js_ast;
const BundledAst = js_ast.BundledAst;
const CharFreq = js_ast.CharFreq;
const ExportsKind = js_ast.ExportsKind;
const Part = js_ast.Part;
const Ref = js_ast.Ref;
const Scope = js_ast.Scope;
const SlotCounts = js_ast.SlotCounts;
const Symbol = js_ast.Symbol;
const TlaCheck = js_ast.TlaCheck;

const Ast = js_ast.Ast;
pub const CommonJSNamedExports = Ast.CommonJSNamedExports;
pub const ConstValuesMap = Ast.ConstValuesMap;
pub const NamedExports = Ast.NamedExports;
pub const NamedImports = Ast.NamedImports;
pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts;

src/ast/CharFreq.zig (new file, 139 lines)
@@ -0,0 +1,139 @@
pub const char_freq_count = 64;
|
||||
pub const CharAndCount = struct {
|
||||
char: u8 = 0,
|
||||
count: i32 = 0,
|
||||
index: usize = 0,
|
||||
|
||||
pub const Array = [char_freq_count]CharAndCount;
|
||||
|
||||
pub fn lessThan(_: void, a: CharAndCount, b: CharAndCount) bool {
|
||||
if (a.count != b.count) {
|
||||
return a.count > b.count;
|
||||
}
|
||||
|
||||
if (a.index != b.index) {
|
||||
return a.index < b.index;
|
||||
}
|
        return a.char < b.char;
    }
};

const Vector = @Vector(char_freq_count, i32);
const Buffer = [char_freq_count]i32;

freqs: Buffer align(1) = undefined,

const scan_big_chunk_size = 32;
pub fn scan(this: *CharFreq, text: string, delta: i32) void {
    if (delta == 0)
        return;

    if (text.len < scan_big_chunk_size) {
        scanSmall(&this.freqs, text, delta);
    } else {
        scanBig(&this.freqs, text, delta);
    }
}

fn scanBig(out: *align(1) Buffer, text: string, delta: i32) void {
    // https://zig.godbolt.org/z/P5dPojWGK
    var freqs = out.*;
    defer out.* = freqs;
    var deltas: [256]i32 = [_]i32{0} ** 256;
    var remain = text;

    bun.assert(remain.len >= scan_big_chunk_size);

    const unrolled = remain.len - (remain.len % scan_big_chunk_size);
    const remain_end = remain.ptr + unrolled;
    var unrolled_ptr = remain.ptr;
    remain = remain[unrolled..];

    while (unrolled_ptr != remain_end) : (unrolled_ptr += scan_big_chunk_size) {
        const chunk = unrolled_ptr[0..scan_big_chunk_size].*;
        inline for (0..scan_big_chunk_size) |i| {
            deltas[@as(usize, chunk[i])] += delta;
        }
    }

    for (remain) |c| {
        deltas[@as(usize, c)] += delta;
    }

    freqs[0..26].* = deltas['a' .. 'a' + 26].*;
    freqs[26 .. 26 * 2].* = deltas['A' .. 'A' + 26].*;
    freqs[26 * 2 .. 62].* = deltas['0' .. '0' + 10].*;
    freqs[62] = deltas['_'];
    freqs[63] = deltas['$'];
}

fn scanSmall(out: *align(1) Buffer, text: string, delta: i32) void {
    var freqs: [char_freq_count]i32 = out.*;
    defer out.* = freqs;

    for (text) |c| {
        const i: usize = switch (c) {
            'a'...'z' => @as(usize, @intCast(c)) - 'a',
            'A'...'Z' => @as(usize, @intCast(c)) - ('A' - 26),
            // digits occupy indices 52..61, matching scanBig's layout above
            '0'...'9' => @as(usize, @intCast(c)) + (52 - '0'),
            '_' => 62,
            '$' => 63,
            else => continue,
        };
        freqs[i] += delta;
    }
}

pub fn include(this: *CharFreq, other: CharFreq) void {
    // https://zig.godbolt.org/z/Mq8eK6K9s
    const left: @Vector(char_freq_count, i32) = this.freqs;
    const right: @Vector(char_freq_count, i32) = other.freqs;

    this.freqs = left + right;
}

pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier {
    const array: CharAndCount.Array = brk: {
        var _array: CharAndCount.Array = undefined;

        for (&_array, NameMinifier.default_tail, this.freqs, 0..) |*dest, char, freq, i| {
            dest.* = CharAndCount{
                .char = char,
                .index = i,
                .count = freq,
            };
        }

        std.sort.pdq(CharAndCount, &_array, {}, CharAndCount.lessThan);

        break :brk _array;
    };

    var minifier = NameMinifier.init(allocator);
    minifier.head.ensureTotalCapacityPrecise(NameMinifier.default_head.len) catch unreachable;
    minifier.tail.ensureTotalCapacityPrecise(NameMinifier.default_tail.len) catch unreachable;
    // TODO: investigate counting number of < 0 and > 0 and pre-allocating
    for (array) |item| {
        if (item.char < '0' or item.char > '9') {
            minifier.head.append(item.char) catch unreachable;
        }
        minifier.tail.append(item.char) catch unreachable;
    }

    return minifier;
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const string = bun.string;

const js_ast = bun.js_ast;
const CharFreq = js_ast.CharFreq;
const NameMinifier = js_ast.NameMinifier;

const G = js_ast.G;
pub const Class = G.Class;
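A minimal usage sketch of the CharFreq pipeline above, not part of the diff: the caller, allocator, and sample text are hypothetical, and only the `scan`/`include`/`compile` API shown above is assumed. Each scanned source contributes counts, counters merge with a single vectorized add, and `compile` sorts the 64-character identifier alphabet by descending frequency so the minifier hands the shortest names to the most common characters.

    // Hedged sketch of a hypothetical caller.
    var total = CharFreq{ .freqs = [_]i32{0} ** char_freq_count };
    var per_file = CharFreq{ .freqs = [_]i32{0} ** char_freq_count };
    per_file.scan("const value = compute(value);", 1); // +1 per character occurrence
    total.include(per_file); // element-wise SIMD add of the two counters
    const minifier = total.compile(bun.default_allocator); // frequency-sorted alphabet
    _ = minifier;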
src/ast/E.zig (new file, 1441 lines; diff suppressed because it is too large)
src/ast/Expr.zig (new file, 3231 lines; diff suppressed because it is too large)
src/ast/G.zig (new file, 231 lines)
@@ -0,0 +1,231 @@
pub const Decl = struct {
    binding: BindingNodeIndex,
    value: ?ExprNodeIndex = null,

    pub const List = BabyList(Decl);
};

pub const NamespaceAlias = struct {
    namespace_ref: Ref,
    alias: string,

    was_originally_property_access: bool = false,

    import_record_index: u32 = std.math.maxInt(u32),
};

pub const ExportStarAlias = struct {
    loc: logger.Loc,

    // Although this alias name starts off as being the same as the statement's
    // namespace symbol, it may diverge if the namespace symbol name is minified.
    // The original alias name is preserved here to avoid this scenario.
    original_name: string,
};

pub const Class = struct {
    class_keyword: logger.Range = logger.Range.None,
    ts_decorators: ExprNodeList = ExprNodeList{},
    class_name: ?LocRef = null,
    extends: ?ExprNodeIndex = null,
    body_loc: logger.Loc = logger.Loc.Empty,
    close_brace_loc: logger.Loc = logger.Loc.Empty,
    properties: []Property = &([_]Property{}),
    has_decorators: bool = false,

    pub fn canBeMoved(this: *const Class) bool {
        if (this.extends != null)
            return false;

        if (this.has_decorators) {
            return false;
        }

        for (this.properties) |property| {
            if (property.kind == .class_static_block)
                return false;

            const flags = property.flags;
            if (flags.contains(.is_computed) or flags.contains(.is_spread)) {
                return false;
            }

            if (property.kind == .normal) {
                if (flags.contains(.is_static)) {
                    for ([2]?Expr{ property.value, property.initializer }) |val_| {
                        if (val_) |val| {
                            switch (val.data) {
                                .e_arrow, .e_function => {},
                                else => {
                                    if (!val.canBeMoved()) {
                                        return false;
                                    }
                                },
                            }
                        }
                    }
                }
            }
        }

        return true;
    }
};

// invalid shadowing if left as Comment
pub const Comment = struct { loc: logger.Loc, text: string };

pub const ClassStaticBlock = struct {
    stmts: BabyList(Stmt) = .{},
    loc: logger.Loc,
};

pub const Property = struct {
    /// This is used when parsing a pattern that uses default values:
    ///
    ///   [a = 1] = [];
    ///   ({a = 1} = {});
    ///
    /// It's also used for class fields:
    ///
    ///   class Foo { a = 1 }
    ///
    initializer: ?ExprNodeIndex = null,
    kind: Kind = .normal,
    flags: Flags.Property.Set = Flags.Property.None,

    class_static_block: ?*ClassStaticBlock = null,
    ts_decorators: ExprNodeList = .{},
    // Key is optional for spread
    key: ?ExprNodeIndex = null,

    // This is omitted for class fields
    value: ?ExprNodeIndex = null,

    ts_metadata: TypeScript.Metadata = .m_none,

    pub const List = BabyList(Property);

    pub fn deepClone(this: *const Property, allocator: std.mem.Allocator) !Property {
        var class_static_block: ?*ClassStaticBlock = null;
        if (this.class_static_block != null) {
            class_static_block = bun.create(allocator, ClassStaticBlock, .{
                .loc = this.class_static_block.?.loc,
                .stmts = try this.class_static_block.?.stmts.clone(allocator),
            });
        }
        return .{
            .initializer = if (this.initializer) |init| try init.deepClone(allocator) else null,
            .kind = this.kind,
            .flags = this.flags,
            .class_static_block = class_static_block,
            .ts_decorators = try this.ts_decorators.deepClone(allocator),
            .key = if (this.key) |key| try key.deepClone(allocator) else null,
            .value = if (this.value) |value| try value.deepClone(allocator) else null,
            .ts_metadata = this.ts_metadata,
        };
    }

    pub const Kind = enum(u3) {
        normal,
        get,
        set,
        spread,
        declare,
        abstract,
        class_static_block,

        pub fn jsonStringify(self: @This(), writer: anytype) !void {
            return try writer.write(@tagName(self));
        }
    };
};

pub const FnBody = struct {
    loc: logger.Loc,
    stmts: StmtNodeList,

    pub fn initReturnExpr(allocator: std.mem.Allocator, expr: Expr) !FnBody {
        return .{
            .stmts = try allocator.dupe(Stmt, &.{Stmt.alloc(S.Return, .{
                .value = expr,
            }, expr.loc)}),
            .loc = expr.loc,
        };
    }
};

pub const Fn = struct {
    name: ?LocRef = null,
    open_parens_loc: logger.Loc = logger.Loc.Empty,
    args: []Arg = &.{},
    // This was originally nullable, but doing so I believe caused a miscompilation
    // Specifically, the body was always null.
    body: FnBody = .{ .loc = logger.Loc.Empty, .stmts = &.{} },
    arguments_ref: ?Ref = null,

    flags: Flags.Function.Set = Flags.Function.None,

    return_ts_metadata: TypeScript.Metadata = .m_none,

    pub fn deepClone(this: *const Fn, allocator: std.mem.Allocator) !Fn {
        const args = try allocator.alloc(Arg, this.args.len);
        for (0..args.len) |i| {
            args[i] = try this.args[i].deepClone(allocator);
        }
        return .{
            .name = this.name,
            .open_parens_loc = this.open_parens_loc,
            .args = args,
            .body = .{
                .loc = this.body.loc,
                .stmts = this.body.stmts,
            },
            .arguments_ref = this.arguments_ref,
            .flags = this.flags,
            .return_ts_metadata = this.return_ts_metadata,
        };
    }
};
pub const Arg = struct {
    ts_decorators: ExprNodeList = ExprNodeList{},
    binding: BindingNodeIndex,
    default: ?ExprNodeIndex = null,

    // "constructor(public x: boolean) {}"
    is_typescript_ctor_field: bool = false,

    ts_metadata: TypeScript.Metadata = .m_none,

    pub fn deepClone(this: *const Arg, allocator: std.mem.Allocator) !Arg {
        return .{
            .ts_decorators = try this.ts_decorators.deepClone(allocator),
            .binding = this.binding,
            .default = if (this.default) |d| try d.deepClone(allocator) else null,
            .is_typescript_ctor_field = this.is_typescript_ctor_field,
            .ts_metadata = this.ts_metadata,
        };
    }
};

// @sortImports

const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const logger = bun.logger;
const string = bun.string;
const TypeScript = bun.js_parser.TypeScript;

const js_ast = bun.js_ast;
const BindingNodeIndex = js_ast.BindingNodeIndex;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;
const LocRef = js_ast.LocRef;
const Ref = js_ast.Ref;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const StmtNodeList = js_ast.StmtNodeList;
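A small sketch of how `Property.deepClone` composes, not part of the diff; the helper name is hypothetical and only the `deepClone` signature shown above is assumed. Cloning a property list is just element-wise `deepClone` into a freshly allocated slice, with errors propagating to the caller:

    // Hedged sketch (hypothetical helper): clone a class's property list.
    fn cloneProperties(allocator: std.mem.Allocator, props: []const Property) ![]Property {
        const out = try allocator.alloc(Property, props.len);
        for (props, out) |*src, *dest| dest.* = try src.deepClone(allocator);
        return out;
    }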
src/ast/Macro.zig (new file, 671 lines)
@@ -0,0 +1,671 @@
pub const namespace: string = "macro";
pub const namespaceWithColon: string = namespace ++ ":";

pub fn isMacroPath(str: string) bool {
    return strings.hasPrefixComptime(str, namespaceWithColon);
}

pub const MacroContext = struct {
    pub const MacroMap = std.AutoArrayHashMap(i32, Macro);

    resolver: *Resolver,
    env: *DotEnv.Loader,
    macros: MacroMap,
    remap: MacroRemap,
    javascript_object: JSC.JSValue = JSC.JSValue.zero,

    pub fn getRemap(this: MacroContext, path: string) ?MacroRemapEntry {
        if (this.remap.entries.len == 0) return null;
        return this.remap.get(path);
    }

    pub fn init(transpiler: *Transpiler) MacroContext {
        return MacroContext{
            .macros = MacroMap.init(default_allocator),
            .resolver = &transpiler.resolver,
            .env = transpiler.env,
            .remap = transpiler.options.macro_remap,
        };
    }

    pub fn call(
        this: *MacroContext,
        import_record_path: string,
        source_dir: string,
        log: *logger.Log,
        source: *const logger.Source,
        import_range: logger.Range,
        caller: Expr,
        function_name: string,
    ) anyerror!Expr {
        Expr.Data.Store.disable_reset = true;
        Stmt.Data.Store.disable_reset = true;
        defer Expr.Data.Store.disable_reset = false;
        defer Stmt.Data.Store.disable_reset = false;
        // const is_package_path = isPackagePath(specifier);
        const import_record_path_without_macro_prefix = if (isMacroPath(import_record_path))
            import_record_path[namespaceWithColon.len..]
        else
            import_record_path;

        bun.assert(!isMacroPath(import_record_path_without_macro_prefix));

        const input_specifier = brk: {
            if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| {
                break :brk replacement.path;
            }

            const resolve_result = this.resolver.resolve(source_dir, import_record_path_without_macro_prefix, .stmt) catch |err| {
                switch (err) {
                    error.ModuleNotFound => {
                        log.addResolveError(
                            source,
                            import_range,
                            log.msgs.allocator,
                            "Macro \"{s}\" not found",
                            .{import_record_path},
                            .stmt,
                            err,
                        ) catch unreachable;
                        return error.MacroNotFound;
                    },
                    else => {
                        log.addRangeErrorFmt(
                            source,
                            import_range,
                            log.msgs.allocator,
                            "{s} resolving macro \"{s}\"",
                            .{ @errorName(err), import_record_path },
                        ) catch unreachable;
                        return err;
                    },
                }
            };
            break :brk resolve_result.path_pair.primary.text;
        };

        var specifier_buf: [64]u8 = undefined;
        var specifier_buf_len: u32 = 0;
        const hash = MacroEntryPoint.generateID(
            input_specifier,
            function_name,
            &specifier_buf,
            &specifier_buf_len,
        );

        const macro_entry = this.macros.getOrPut(hash) catch unreachable;
        if (!macro_entry.found_existing) {
            macro_entry.value_ptr.* = Macro.init(
                default_allocator,
                this.resolver,
                input_specifier,
                log,
                this.env,
                function_name,
                specifier_buf[0..specifier_buf_len],
                hash,
            ) catch |err| {
                macro_entry.value_ptr.* = Macro{ .resolver = undefined, .disabled = true };
                return err;
            };
            Output.flush();
        }
        defer Output.flush();

        const macro = macro_entry.value_ptr.*;
        if (macro.disabled) {
            return caller;
        }
        macro.vm.enableMacroMode();
        defer macro.vm.disableMacroMode();
        macro.vm.eventLoop().ensureWaker();

        const Wrapper = struct {
            args: std.meta.ArgsTuple(@TypeOf(Macro.Runner.run)),
            ret: Runner.MacroError!Expr,

            pub fn call(self: *@This()) void {
                self.ret = @call(.auto, Macro.Runner.run, self.args);
            }
        };
        var wrapper = Wrapper{
            .args = .{
                macro,
                log,
                default_allocator,
                function_name,
                caller,
                source,
                hash,
                this.javascript_object,
            },
            .ret = undefined,
        };

        macro.vm.runWithAPILock(Wrapper, &wrapper, Wrapper.call);
        return try wrapper.ret;
        // this.macros.getOrPut(key: K)
    }
};

pub const MacroResult = struct {
    import_statements: []S.Import = &[_]S.Import{},
    replacement: Expr,
};

resolver: *Resolver,
vm: *JavaScript.VirtualMachine = undefined,

resolved: ResolveResult = undefined,
disabled: bool = false,

pub fn init(
    _: std.mem.Allocator,
    resolver: *Resolver,
    input_specifier: []const u8,
    log: *logger.Log,
    env: *DotEnv.Loader,
    function_name: string,
    specifier: string,
    hash: i32,
) !Macro {
    var vm: *JavaScript.VirtualMachine = if (JavaScript.VirtualMachine.isLoaded())
        JavaScript.VirtualMachine.get()
    else brk: {
        const old_transform_options = resolver.opts.transform_options;
        defer resolver.opts.transform_options = old_transform_options;

        // JSC needs to be initialized if building from CLI
        JSC.initialize(false);

        var _vm = try JavaScript.VirtualMachine.init(.{
            .allocator = default_allocator,
            .args = resolver.opts.transform_options,
            .log = log,
            .is_main_thread = false,
            .env_loader = env,
        });

        _vm.enableMacroMode();
        _vm.eventLoop().ensureWaker();

        try _vm.transpiler.configureDefines();
        break :brk _vm;
    };

    vm.enableMacroMode();
    vm.eventLoop().ensureWaker();

    const loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash);

    switch (loaded_result.unwrap(vm.jsc, .leave_unhandled)) {
        .rejected => |result| {
            vm.unhandledRejection(vm.global, result, loaded_result.asValue());
            vm.disableMacroMode();
            return error.MacroLoadError;
        },
        else => {},
    }

    return Macro{
        .vm = vm,
        .resolver = resolver,
    };
}

pub const Runner = struct {
    const VisitMap = std.AutoHashMapUnmanaged(JSC.JSValue, Expr);

    threadlocal var args_buf: [3]js.JSObjectRef = undefined;
    threadlocal var exception_holder: JSC.ZigException.Holder = undefined;
    pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError;

    pub const Run = struct {
        caller: Expr,
        function_name: string,
        macro: *const Macro,
        global: *JSC.JSGlobalObject,
        allocator: std.mem.Allocator,
        id: i32,
        log: *logger.Log,
        source: *const logger.Source,
        visited: VisitMap = VisitMap{},
        is_top_level: bool = false,

        pub fn runAsync(
            macro: Macro,
            log: *logger.Log,
            allocator: std.mem.Allocator,
            function_name: string,
            caller: Expr,
            args: []JSC.JSValue,
            source: *const logger.Source,
            id: i32,
        ) MacroError!Expr {
            const macro_callback = macro.vm.macros.get(id) orelse return caller;

            const result = js.JSObjectCallAsFunctionReturnValueHoldingAPILock(
                macro.vm.global,
                macro_callback,
                null,
                args.len,
                @as([*]js.JSObjectRef, @ptrCast(args.ptr)),
            );

            var runner = Run{
                .caller = caller,
                .function_name = function_name,
                .macro = &macro,
                .allocator = allocator,
                .global = macro.vm.global,
                .id = id,
                .log = log,
                .source = source,
                .visited = VisitMap{},
            };

            defer runner.visited.deinit(allocator);

            return try runner.run(
                result,
            );
        }

        pub fn run(
            this: *Run,
            value: JSC.JSValue,
        ) MacroError!Expr {
            return switch ((try JSC.ConsoleObject.Formatter.Tag.get(value, this.global)).tag) {
                .Error => this.coerce(value, .Error),
                .Undefined => this.coerce(value, .Undefined),
                .Null => this.coerce(value, .Null),
                .Private => this.coerce(value, .Private),
                .Boolean => this.coerce(value, .Boolean),
                .Array => this.coerce(value, .Array),
                .Object => this.coerce(value, .Object),
                .toJSON, .JSON => this.coerce(value, .JSON),
                .Integer => this.coerce(value, .Integer),
                .Double => this.coerce(value, .Double),
                .String => this.coerce(value, .String),
                .Promise => this.coerce(value, .Promise),
                else => brk: {
                    const name = value.getClassInfoName() orelse "unknown";

                    this.log.addErrorFmt(
                        this.source,
                        this.caller.loc,
                        this.allocator,
                        "cannot coerce {s} ({s}) to Bun's AST. Please return a simpler type",
                        .{ name, @tagName(value.jsType()) },
                    ) catch unreachable;
                    break :brk error.MacroFailed;
                },
            };
        }

        pub fn coerce(
            this: *Run,
            value: JSC.JSValue,
            comptime tag: JSC.ConsoleObject.Formatter.Tag,
        ) MacroError!Expr {
            switch (comptime tag) {
                .Error => {
                    _ = this.macro.vm.uncaughtException(this.global, value, false);
                    return this.caller;
                },
                .Undefined => if (this.is_top_level)
                    return this.caller
                else
                    return Expr.init(E.Undefined, E.Undefined{}, this.caller.loc),
                .Null => return Expr.init(E.Null, E.Null{}, this.caller.loc),
                .Private => {
                    this.is_top_level = false;
                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
                    if (_entry.found_existing) {
                        return _entry.value_ptr.*;
                    }

                    var blob_: ?JSC.WebCore.Blob = null;
                    const mime_type: ?MimeType = null;

                    if (value.jsType() == .DOMWrapper) {
                        if (value.as(JSC.WebCore.Response)) |resp| {
                            return this.run(try resp.getBlobWithoutCallFrame(this.global));
                        } else if (value.as(JSC.WebCore.Request)) |resp| {
                            return this.run(try resp.getBlobWithoutCallFrame(this.global));
                        } else if (value.as(JSC.WebCore.Blob)) |resp| {
                            blob_ = resp.*;
                            blob_.?.allocator = null;
                        } else if (value.as(bun.api.ResolveMessage) != null or value.as(bun.api.BuildMessage) != null) {
                            _ = this.macro.vm.uncaughtException(this.global, value, false);
                            return error.MacroFailed;
                        }
                    }

                    if (blob_) |*blob| {
                        const out_expr = Expr.fromBlob(
                            blob,
                            this.allocator,
                            mime_type,
                            this.log,
                            this.caller.loc,
                        ) catch {
                            blob.deinit();
                            return error.MacroFailed;
                        };
                        if (out_expr.data == .e_string) {
                            blob.deinit();
                        }

                        return out_expr;
                    }

                    return Expr.init(E.String, E.String.empty, this.caller.loc);
                },

                .Boolean => {
                    return Expr{ .data = .{ .e_boolean = .{ .value = value.toBoolean() } }, .loc = this.caller.loc };
                },
                JSC.ConsoleObject.Formatter.Tag.Array => {
                    this.is_top_level = false;

                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
                    if (_entry.found_existing) {
                        switch (_entry.value_ptr.*.data) {
                            .e_object, .e_array => {
                                this.log.addErrorFmt(this.source, this.caller.loc, this.allocator, "converting circular structure to Bun AST is not implemented yet", .{}) catch unreachable;
                                return error.MacroFailed;
                            },
                            else => {},
                        }
                        return _entry.value_ptr.*;
                    }

                    var iter = try JSC.JSArrayIterator.init(value, this.global);
                    if (iter.len == 0) {
                        const result = Expr.init(
                            E.Array,
                            E.Array{
                                .items = ExprNodeList.init(&[_]Expr{}),
                                .was_originally_macro = true,
                            },
                            this.caller.loc,
                        );
                        _entry.value_ptr.* = result;
                        return result;
                    }
                    var array = this.allocator.alloc(Expr, iter.len) catch unreachable;
                    var out = Expr.init(
                        E.Array,
                        E.Array{
                            .items = ExprNodeList.init(array[0..0]),
                            .was_originally_macro = true,
                        },
                        this.caller.loc,
                    );
                    _entry.value_ptr.* = out;

                    errdefer this.allocator.free(array);
                    var i: usize = 0;
                    while (try iter.next()) |item| {
                        array[i] = try this.run(item);
                        if (array[i].isMissing())
                            continue;
                        i += 1;
                    }
                    out.data.e_array.items = ExprNodeList.init(array);
                    _entry.value_ptr.* = out;
                    return out;
                },
                // TODO: optimize this
                JSC.ConsoleObject.Formatter.Tag.Object => {
                    this.is_top_level = false;
                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
                    if (_entry.found_existing) {
                        switch (_entry.value_ptr.*.data) {
                            .e_object, .e_array => {
                                this.log.addErrorFmt(this.source, this.caller.loc, this.allocator, "converting circular structure to Bun AST is not implemented yet", .{}) catch unreachable;
                                return error.MacroFailed;
                            },
                            else => {},
                        }
                        return _entry.value_ptr.*;
                    }
                    // SAFETY: tag ensures `value` is an object.
                    const obj = value.getObject() orelse unreachable;
                    var object_iter = try JSC.JSPropertyIterator(.{
                        .skip_empty_name = false,
                        .include_value = true,
                    }).init(this.global, obj);
                    defer object_iter.deinit();
                    var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable;
                    errdefer this.allocator.free(properties);
                    var out = Expr.init(
                        E.Object,
                        E.Object{
                            .properties = BabyList(G.Property).init(properties),
                            .was_originally_macro = true,
                        },
                        this.caller.loc,
                    );
                    _entry.value_ptr.* = out;

                    while (try object_iter.next()) |prop| {
                        properties[object_iter.i] = G.Property{
                            .key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc),
                            .value = try this.run(object_iter.value),
                        };
                    }
                    out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]);
                    _entry.value_ptr.* = out;
                    return out;
                },

                .JSON => {
                    this.is_top_level = false;
                    // if (console_tag.cell == .JSDate) {
                    //     // in the code for printing dates, it never exceeds this amount
                    //     var iso_string_buf = this.allocator.alloc(u8, 36) catch unreachable;
                    //     var str = JSC.ZigString.init("");
                    //     value.jsonStringify(this.global, 0, &str);
                    //     var out_buf: []const u8 = std.fmt.bufPrint(iso_string_buf, "{}", .{str}) catch "";
                    //     if (out_buf.len > 2) {
                    //         // trim the quotes
                    //         out_buf = out_buf[1 .. out_buf.len - 1];
                    //     }
                    //     return Expr.init(E.New, E.New{.target = Expr.init(E.Dot{.target = E}) })
                    // }
                },

                .Integer => {
                    return Expr.init(E.Number, E.Number{ .value = @as(f64, @floatFromInt(value.toInt32())) }, this.caller.loc);
                },
                .Double => {
                    return Expr.init(E.Number, E.Number{ .value = value.asNumber() }, this.caller.loc);
                },
                .String => {
                    var bun_str = try value.toBunString(this.global);
                    defer bun_str.deref();

                    // encode into utf16 so the printer escapes the string correctly
                    var utf16_bytes = this.allocator.alloc(u16, bun_str.length()) catch unreachable;
                    const out_slice = utf16_bytes[0 .. (bun_str.encodeInto(std.mem.sliceAsBytes(utf16_bytes), .utf16le) catch 0) / 2];
                    return Expr.init(E.String, E.String.init(out_slice), this.caller.loc);
                },
                .Promise => {
                    const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
                    if (_entry.found_existing) {
                        return _entry.value_ptr.*;
                    }

                    const promise = value.asAnyPromise() orelse @panic("Unexpected promise type");

                    this.macro.vm.waitForPromise(promise);

                    const promise_result = promise.result(this.macro.vm.jsc);
                    const rejected = promise.status(this.macro.vm.jsc) == .rejected;

                    if (promise_result.isUndefined() and this.is_top_level) {
                        this.is_top_level = false;
                        return this.caller;
                    }

                    if (rejected or promise_result.isError() or promise_result.isAggregateError(this.global) or promise_result.isException(this.global.vm())) {
                        this.macro.vm.unhandledRejection(this.global, promise_result, promise.asValue());
                        return error.MacroFailed;
                    }
                    this.is_top_level = false;
                    const result = try this.run(promise_result);

                    _entry.value_ptr.* = result;
                    return result;
                },
                else => {},
            }

            this.log.addErrorFmt(
                this.source,
                this.caller.loc,
                this.allocator,
                "cannot coerce {s} to Bun's AST. Please return a simpler type",
                .{@tagName(value.jsType())},
            ) catch unreachable;
            return error.MacroFailed;
        }
    };

    pub fn run(
        macro: Macro,
        log: *logger.Log,
        allocator: std.mem.Allocator,
        function_name: string,
        caller: Expr,
        source: *const logger.Source,
        id: i32,
        javascript_object: JSC.JSValue,
    ) MacroError!Expr {
        if (comptime Environment.isDebug) Output.prettyln("<r><d>[macro]<r> call <d><b>{s}<r>", .{function_name});

        exception_holder = JSC.ZigException.Holder.init();
        var js_args: []JSC.JSValue = &.{};
        var js_processed_args_len: usize = 0;
        defer {
            for (js_args[0..js_processed_args_len -| @as(usize, @intFromBool(javascript_object != .zero))]) |arg| {
                arg.unprotect();
            }

            allocator.free(js_args);
        }

        const globalObject = JSC.VirtualMachine.get().global;

        switch (caller.data) {
            .e_call => |call| {
                const call_args: []Expr = call.args.slice();
                js_args = try allocator.alloc(JSC.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero)));
                js_processed_args_len = js_args.len;

                for (0.., call_args, js_args[0..call_args.len]) |i, in, *out| {
                    const value = in.toJS(
                        allocator,
                        globalObject,
                    ) catch |e| {
                        // Keeping a separate variable instead of modifying js_args.len
                        // due to allocator.free call in defer
                        js_processed_args_len = i;
                        return e;
                    };
                    value.protect();
                    out.* = value;
                }
            },
            .e_template => {
                @panic("TODO: support template literals in macros");
            },
            else => {
                @panic("Unexpected caller type");
            },
        }

        if (javascript_object != .zero) {
            if (js_args.len == 0) {
                js_args = try allocator.alloc(JSC.JSValue, 1);
            }

            js_args[js_args.len - 1] = javascript_object;
        }

        const CallFunction = @TypeOf(Run.runAsync);
        const CallArgs = std.meta.ArgsTuple(CallFunction);
        const CallData = struct {
            threadlocal var call_args: CallArgs = undefined;
            threadlocal var result: MacroError!Expr = undefined;
            pub fn callWrapper(args: CallArgs) MacroError!Expr {
                JSC.markBinding(@src());
                call_args = args;
                Bun__startMacro(&call, JSC.VirtualMachine.get().global);
                return result;
            }

            pub fn call() callconv(.C) void {
                const call_args_copy = call_args;
                const local_result = @call(.auto, Run.runAsync, call_args_copy);
                result = local_result;
            }
        };

        // TODO: can change back to `return CallData.callWrapper(.{`
        // when https://github.com/ziglang/zig/issues/16242 is fixed
        return CallData.callWrapper(CallArgs{
            macro,
            log,
            allocator,
            function_name,
            caller,
            js_args,
            source,
            id,
        });
    }

    extern "c" fn Bun__startMacro(function: *const anyopaque, *anyopaque) void;
};

// @sortImports

const DotEnv = @import("../env_loader.zig");
const std = @import("std");

const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const MacroRemapEntry = @import("../resolver/package_json.zig").MacroImportReplacementMap;

const ResolveResult = @import("../resolver/resolver.zig").Result;
const Resolver = @import("../resolver/resolver.zig").Resolver;
const isPackagePath = @import("../resolver/resolver.zig").isPackagePath;

const bun = @import("bun");
const BabyList = bun.BabyList;
const Environment = bun.Environment;
const Output = bun.Output;
const Transpiler = bun.Transpiler;
const default_allocator = bun.default_allocator;
const logger = bun.logger;
const string = bun.string;
const strings = bun.strings;
const Loader = bun.options.Loader;
const MimeType = bun.http.MimeType;
const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint;

const JSC = bun.JSC;
const JavaScript = bun.JSC;
const js = bun.JSC.C;

const js_ast = bun.js_ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const G = js_ast.G;
const Macro = js_ast.Macro;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const ToJSError = js_ast.ToJSError;
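For orientation, a hedged sketch (hypothetical path value) of the `macro:` prefix round-trip that `MacroContext.call` relies on: tagged import paths are detected with `isMacroPath`, the prefix is sliced off before resolution, and the result is asserted to be prefix-free.

    // Hedged sketch: the prefix handling assumed at the top of MacroContext.call.
    const tagged = "macro:./my-macro.ts"; // hypothetical import record path
    bun.assert(isMacroPath(tagged)); // carries the "macro:" namespace prefix
    const stripped = tagged[namespaceWithColon.len..]; // "./my-macro.ts"
    bun.assert(!isMacroPath(stripped)); // re-asserted before resolving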
src/ast/NewStore.zig (new file, 167 lines)
@@ -0,0 +1,167 @@
/// This "Store" is a specialized memory allocation strategy very similar to an
/// arena, used for allocating expression and statement nodes during JavaScript
/// parsing and visiting. Allocations are grouped into large blocks, where each
/// block is treated as a fixed-buffer allocator. When a block runs out of
/// space, a new one is created; all blocks are joined as a linked list.
///
/// Similarly to an arena, you can call .reset() to reset state, reusing memory
/// across operations.
pub fn NewStore(comptime types: []const type, comptime count: usize) type {
    const largest_size, const largest_align = brk: {
        var largest_size = 0;
        var largest_align = 1;
        for (types) |T| {
            if (@sizeOf(T) == 0) {
                @compileError("NewStore does not support 0 size type: " ++ @typeName(T));
            }
            largest_size = @max(@sizeOf(T), largest_size);
            largest_align = @max(@alignOf(T), largest_align);
        }
        break :brk .{ largest_size, largest_align };
    };

    const backing_allocator = bun.default_allocator;

    const log = Output.scoped(.Store, true);

    return struct {
        const Store = @This();

        current: *Block,
        debug_lock: std.debug.SafetyLock = .{},

        pub const Block = struct {
            pub const size = largest_size * count * 2;
            pub const Size = std.math.IntFittingRange(0, size + largest_size);

            buffer: [size]u8 align(largest_align) = undefined,
            bytes_used: Size = 0,
            next: ?*Block = null,

            pub fn tryAlloc(block: *Block, comptime T: type) ?*T {
                const start = std.mem.alignForward(usize, block.bytes_used, @alignOf(T));
                if (start + @sizeOf(T) > block.buffer.len) return null;
                defer block.bytes_used = @intCast(start + @sizeOf(T));

                // it's simpler to use @ptrCast, but as a sanity check, we also
                // try to compute the slice. Zig will report an out of bounds
                // panic if the null detection logic above is wrong
                if (Environment.isDebug) {
                    _ = block.buffer[block.bytes_used..][0..@sizeOf(T)];
                }

                return @alignCast(@ptrCast(&block.buffer[start]));
            }
        };

        const PreAlloc = struct {
            metadata: Store,
            first_block: Block,
        };

        pub fn firstBlock(store: *Store) *Block {
            return &@as(*PreAlloc, @fieldParentPtr("metadata", store)).first_block;
        }

        pub fn init() *Store {
            log("init", .{});
            const prealloc = backing_allocator.create(PreAlloc) catch bun.outOfMemory();

            prealloc.first_block.bytes_used = 0;
            prealloc.first_block.next = null;

            prealloc.metadata = .{
                .current = &prealloc.first_block,
            };

            return &prealloc.metadata;
        }

        pub fn deinit(store: *Store) void {
            log("deinit", .{});
            var it = store.firstBlock().next; // do not free the preallocated first block
            while (it) |next| {
                if (Environment.isDebug or Environment.enable_asan)
                    @memset(&next.buffer, undefined);
                it = next.next;
                backing_allocator.destroy(next);
            }

            const prealloc: *PreAlloc = @fieldParentPtr("metadata", store);
            bun.assert(&prealloc.first_block == store.firstBlock());
            backing_allocator.destroy(prealloc);
        }

        pub fn reset(store: *Store) void {
            log("reset", .{});

            if (Environment.isDebug or Environment.enable_asan) {
                var it: ?*Block = store.firstBlock();
                while (it) |next| : (it = next.next) {
                    next.bytes_used = undefined;
                    @memset(&next.buffer, undefined);
                }
            }

            store.current = store.firstBlock();
            store.current.bytes_used = 0;
        }

        fn allocate(store: *Store, comptime T: type) *T {
            comptime bun.assert(@sizeOf(T) > 0); // don't allocate!
            comptime if (!supportsType(T)) {
                @compileError("Store does not know about type: " ++ @typeName(T));
            };

            if (store.current.tryAlloc(T)) |ptr|
                return ptr;

            // a new block is needed
            const next_block = if (store.current.next) |next| brk: {
                next.bytes_used = 0;
                break :brk next;
            } else brk: {
                const new_block = backing_allocator.create(Block) catch
                    bun.outOfMemory();
                new_block.next = null;
                new_block.bytes_used = 0;
                store.current.next = new_block;
                break :brk new_block;
            };

            store.current = next_block;

            return next_block.tryAlloc(T) orelse
                unreachable; // newly initialized blocks must have enough space for at least one
        }

        pub inline fn append(store: *Store, comptime T: type, data: T) *T {
            const ptr = store.allocate(T);
            if (Environment.isDebug) {
                log("append({s}) -> 0x{x}", .{ bun.meta.typeName(T), @intFromPtr(ptr) });
            }
            ptr.* = data;
            return ptr;
        }

        pub fn lock(store: *Store) void {
            store.debug_lock.lock();
        }

        pub fn unlock(store: *Store) void {
            store.debug_lock.unlock();
        }

        fn supportsType(T: type) bool {
            return std.mem.indexOfScalar(type, types, T) != null;
        }
    };
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;
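A hedged usage sketch of the store above (the node types are hypothetical; only `init`/`append`/`reset`/`deinit` as defined above are assumed). `append` copies the value into block memory and returns a stable pointer; `reset` rewinds to the first block so buffers are reused for the next parse.

    // Hedged sketch: a store sized for 128 nodes per block, two payload types.
    const Num = struct { value: f64 };
    const Str = struct { bytes: []const u8 };
    const NodeStore = NewStore(&.{ Num, Str }, 128);

    var store = NodeStore.init();
    defer store.deinit();
    const n = store.append(Num, .{ .value = 42.0 }); // copied into the current block
    bun.assert(n.value == 42.0);
    store.reset(); // `n` is now dangling; the block memory will be reused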
src/ast/Op.zig (new file, 293 lines)
@@ -0,0 +1,293 @@
// If you add a new token, remember to add it to "Table" too
pub const Code = enum {
    // Prefix
    un_pos, // +expr
    un_neg, // -expr
    un_cpl, // ~expr
    un_not, // !expr
    un_void,
    un_typeof,
    un_delete,

    // Prefix update
    un_pre_dec,
    un_pre_inc,

    // Postfix update
    un_post_dec,
    un_post_inc,

    /// Left-associative
    bin_add,
    /// Left-associative
    bin_sub,
    /// Left-associative
    bin_mul,
    /// Left-associative
    bin_div,
    /// Left-associative
    bin_rem,
    /// Left-associative
    bin_pow,
    /// Left-associative
    bin_lt,
    /// Left-associative
    bin_le,
    /// Left-associative
    bin_gt,
    /// Left-associative
    bin_ge,
    /// Left-associative
    bin_in,
    /// Left-associative
    bin_instanceof,
    /// Left-associative
    bin_shl,
    /// Left-associative
    bin_shr,
    /// Left-associative
    bin_u_shr,
    /// Left-associative
    bin_loose_eq,
    /// Left-associative
    bin_loose_ne,
    /// Left-associative
    bin_strict_eq,
    /// Left-associative
    bin_strict_ne,
    /// Left-associative
    bin_nullish_coalescing,
    /// Left-associative
    bin_logical_or,
    /// Left-associative
    bin_logical_and,
    /// Left-associative
    bin_bitwise_or,
    /// Left-associative
    bin_bitwise_and,
    /// Left-associative
    bin_bitwise_xor,

    /// Non-associative
    bin_comma,

    /// Right-associative
    bin_assign,
    /// Right-associative
    bin_add_assign,
    /// Right-associative
    bin_sub_assign,
    /// Right-associative
    bin_mul_assign,
    /// Right-associative
    bin_div_assign,
    /// Right-associative
    bin_rem_assign,
    /// Right-associative
    bin_pow_assign,
    /// Right-associative
    bin_shl_assign,
    /// Right-associative
    bin_shr_assign,
    /// Right-associative
    bin_u_shr_assign,
    /// Right-associative
    bin_bitwise_or_assign,
    /// Right-associative
    bin_bitwise_and_assign,
    /// Right-associative
    bin_bitwise_xor_assign,
    /// Right-associative
    bin_nullish_coalescing_assign,
    /// Right-associative
    bin_logical_or_assign,
    /// Right-associative
    bin_logical_and_assign,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }

    pub fn unaryAssignTarget(code: Op.Code) AssignTarget {
        if (@intFromEnum(code) >=
            @intFromEnum(Op.Code.un_pre_dec) and @intFromEnum(code) <=
            @intFromEnum(Op.Code.un_post_inc))
        {
            return AssignTarget.update;
        }

        return AssignTarget.none;
    }
    pub fn isLeftAssociative(code: Op.Code) bool {
        return @intFromEnum(code) >=
            @intFromEnum(Op.Code.bin_add) and
            @intFromEnum(code) < @intFromEnum(Op.Code.bin_comma) and code != .bin_pow;
    }
    pub fn isRightAssociative(code: Op.Code) bool {
        return @intFromEnum(code) >= @intFromEnum(Op.Code.bin_assign) or code == .bin_pow;
    }
    pub fn binaryAssignTarget(code: Op.Code) AssignTarget {
        if (code == .bin_assign) {
            return AssignTarget.replace;
        }

        if (@intFromEnum(code) > @intFromEnum(Op.Code.bin_assign)) {
            return AssignTarget.update;
        }

        return AssignTarget.none;
    }

    pub fn isPrefix(code: Op.Code) bool {
        return @intFromEnum(code) < @intFromEnum(Op.Code.un_post_dec);
    }
};

pub const Level = enum(u6) {
    lowest,
    comma,
    spread,
    yield,
    assign,
    conditional,
    nullish_coalescing,
    logical_or,
    logical_and,
    bitwise_or,
    bitwise_xor,
    bitwise_and,
    equals,
    compare,
    shift,
    add,
    multiply,
    exponentiation,
    prefix,
    postfix,
    new,
    call,
    member,

    pub inline fn lt(self: Level, b: Level) bool {
        return @intFromEnum(self) < @intFromEnum(b);
    }
    pub inline fn gt(self: Level, b: Level) bool {
        return @intFromEnum(self) > @intFromEnum(b);
    }
    pub inline fn gte(self: Level, b: Level) bool {
        return @intFromEnum(self) >= @intFromEnum(b);
    }
    pub inline fn lte(self: Level, b: Level) bool {
        return @intFromEnum(self) <= @intFromEnum(b);
    }
    pub inline fn eql(self: Level, b: Level) bool {
        return @intFromEnum(self) == @intFromEnum(b);
    }

    pub inline fn sub(self: Level, i: anytype) Level {
        return @as(Level, @enumFromInt(@intFromEnum(self) - i));
    }

    pub inline fn addF(self: Level, i: anytype) Level {
        return @as(Level, @enumFromInt(@intFromEnum(self) + i));
    }
};

text: string,
level: Level,
is_keyword: bool = false,

pub fn init(triple: anytype) Op {
    return Op{
        .text = triple.@"0",
        .level = triple.@"1",
        .is_keyword = triple.@"2",
    };
}

pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
    return try writer.write(self.text);
}

pub const TableType: std.EnumArray(Op.Code, Op) = undefined;
pub const Table = brk: {
    var table = std.EnumArray(Op.Code, Op).initUndefined();

    // Prefix
    table.set(Op.Code.un_pos, Op.init(.{ "+", Level.prefix, false }));
    table.set(Op.Code.un_neg, Op.init(.{ "-", Level.prefix, false }));
    table.set(Op.Code.un_cpl, Op.init(.{ "~", Level.prefix, false }));
    table.set(Op.Code.un_not, Op.init(.{ "!", Level.prefix, false }));
    table.set(Op.Code.un_void, Op.init(.{ "void", Level.prefix, true }));
    table.set(Op.Code.un_typeof, Op.init(.{ "typeof", Level.prefix, true }));
    table.set(Op.Code.un_delete, Op.init(.{ "delete", Level.prefix, true }));

    // Prefix update
    table.set(Op.Code.un_pre_dec, Op.init(.{ "--", Level.prefix, false }));
    table.set(Op.Code.un_pre_inc, Op.init(.{ "++", Level.prefix, false }));

    // Postfix update
    table.set(Op.Code.un_post_dec, Op.init(.{ "--", Level.postfix, false }));
    table.set(Op.Code.un_post_inc, Op.init(.{ "++", Level.postfix, false }));

    // Left-associative
    table.set(Op.Code.bin_add, Op.init(.{ "+", Level.add, false }));
    table.set(Op.Code.bin_sub, Op.init(.{ "-", Level.add, false }));
    table.set(Op.Code.bin_mul, Op.init(.{ "*", Level.multiply, false }));
    table.set(Op.Code.bin_div, Op.init(.{ "/", Level.multiply, false }));
    table.set(Op.Code.bin_rem, Op.init(.{ "%", Level.multiply, false }));
    table.set(Op.Code.bin_pow, Op.init(.{ "**", Level.exponentiation, false }));
    table.set(Op.Code.bin_lt, Op.init(.{ "<", Level.compare, false }));
    table.set(Op.Code.bin_le, Op.init(.{ "<=", Level.compare, false }));
    table.set(Op.Code.bin_gt, Op.init(.{ ">", Level.compare, false }));
    table.set(Op.Code.bin_ge, Op.init(.{ ">=", Level.compare, false }));
    table.set(Op.Code.bin_in, Op.init(.{ "in", Level.compare, true }));
    table.set(Op.Code.bin_instanceof, Op.init(.{ "instanceof", Level.compare, true }));
    table.set(Op.Code.bin_shl, Op.init(.{ "<<", Level.shift, false }));
    table.set(Op.Code.bin_shr, Op.init(.{ ">>", Level.shift, false }));
    table.set(Op.Code.bin_u_shr, Op.init(.{ ">>>", Level.shift, false }));
    table.set(Op.Code.bin_loose_eq, Op.init(.{ "==", Level.equals, false }));
    table.set(Op.Code.bin_loose_ne, Op.init(.{ "!=", Level.equals, false }));
    table.set(Op.Code.bin_strict_eq, Op.init(.{ "===", Level.equals, false }));
    table.set(Op.Code.bin_strict_ne, Op.init(.{ "!==", Level.equals, false }));
    table.set(Op.Code.bin_nullish_coalescing, Op.init(.{ "??", Level.nullish_coalescing, false }));
    table.set(Op.Code.bin_logical_or, Op.init(.{ "||", Level.logical_or, false }));
    table.set(Op.Code.bin_logical_and, Op.init(.{ "&&", Level.logical_and, false }));
    table.set(Op.Code.bin_bitwise_or, Op.init(.{ "|", Level.bitwise_or, false }));
    table.set(Op.Code.bin_bitwise_and, Op.init(.{ "&", Level.bitwise_and, false }));
    table.set(Op.Code.bin_bitwise_xor, Op.init(.{ "^", Level.bitwise_xor, false }));

    // Non-associative
    table.set(Op.Code.bin_comma, Op.init(.{ ",", Level.comma, false }));

    // Right-associative
    table.set(Op.Code.bin_assign, Op.init(.{ "=", Level.assign, false }));
    table.set(Op.Code.bin_add_assign, Op.init(.{ "+=", Level.assign, false }));
    table.set(Op.Code.bin_sub_assign, Op.init(.{ "-=", Level.assign, false }));
    table.set(Op.Code.bin_mul_assign, Op.init(.{ "*=", Level.assign, false }));
    table.set(Op.Code.bin_div_assign, Op.init(.{ "/=", Level.assign, false }));
    table.set(Op.Code.bin_rem_assign, Op.init(.{ "%=", Level.assign, false }));
    table.set(Op.Code.bin_pow_assign, Op.init(.{ "**=", Level.assign, false }));
    table.set(Op.Code.bin_shl_assign, Op.init(.{ "<<=", Level.assign, false }));
    table.set(Op.Code.bin_shr_assign, Op.init(.{ ">>=", Level.assign, false }));
    table.set(Op.Code.bin_u_shr_assign, Op.init(.{ ">>>=", Level.assign, false }));
    table.set(Op.Code.bin_bitwise_or_assign, Op.init(.{ "|=", Level.assign, false }));
    table.set(Op.Code.bin_bitwise_and_assign, Op.init(.{ "&=", Level.assign, false }));
    table.set(Op.Code.bin_bitwise_xor_assign, Op.init(.{ "^=", Level.assign, false }));
    table.set(Op.Code.bin_nullish_coalescing_assign, Op.init(.{ "??=", Level.assign, false }));
    table.set(Op.Code.bin_logical_or_assign, Op.init(.{ "||=", Level.assign, false }));
    table.set(Op.Code.bin_logical_and_assign, Op.init(.{ "&&=", Level.assign, false }));

    break :brk table;
};

// @sortImports

const std = @import("std");

const bun = @import("bun");
const string = bun.string;

const js_ast = bun.js_ast;
const AssignTarget = js_ast.AssignTarget;
const Op = js_ast.Op;
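A brief sketch of how a printer might consult these tables (the helper is hypothetical; `Table`, `Level.lt`, and the associativity predicates are used exactly as defined above): an operand generally needs parentheses when its precedence level is below the context's, with associativity breaking ties for chained operators, and `**` being the one right-associative non-assignment operator.

    // Hedged sketch (hypothetical helpers), not a complete parenthesization rule.
    fn needsParens(child: Op.Code, parent_level: Op.Level) bool {
        const info = Op.Table.get(child); // text, level, is_keyword
        return info.level.lt(parent_level);
    }
    fn associativityExamples() void {
        bun.assert(Op.Code.isRightAssociative(.bin_pow)); // a ** b ** c == a ** (b ** c)
        bun.assert(!Op.Code.isLeftAssociative(.bin_pow));
        bun.assert(Op.Code.isLeftAssociative(.bin_sub)); // a - b - c == (a - b) - c
    }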
src/ast/S.zig (new file, 233 lines)
@@ -0,0 +1,233 @@
pub const Block = struct {
    stmts: StmtNodeList,
    close_brace_loc: logger.Loc = logger.Loc.Empty,
};

pub const SExpr = struct {
    value: ExprNodeIndex,

    // This is set to true for automatically-generated expressions that should
    // not affect tree shaking. For example, calling a function from the runtime
    // that doesn't have externally-visible side effects.
    does_not_affect_tree_shaking: bool = false,
};

pub const Comment = struct { text: string };

pub const Directive = struct {
    value: []const u8,
};

pub const ExportClause = struct {
    items: []ClauseItem,
    is_single_line: bool,
};

pub const Empty = struct {};

pub const ExportStar = struct {
    namespace_ref: Ref,
    alias: ?G.ExportStarAlias = null,
    import_record_index: u32,
};

// This is an "export = value;" statement in TypeScript
pub const ExportEquals = struct { value: ExprNodeIndex };

pub const Label = struct { name: LocRef, stmt: StmtNodeIndex };

// This is a stand-in for a TypeScript type declaration
pub const TypeScript = struct {};

pub const Debugger = struct {};

pub const ExportFrom = struct {
    items: []ClauseItem,
    namespace_ref: Ref,
    import_record_index: u32,
    is_single_line: bool,
};

pub const ExportDefault = struct {
    default_name: LocRef, // value may be a SFunction or SClass
    value: StmtOrExpr,

    pub fn canBeMoved(self: *const ExportDefault) bool {
        return switch (self.value) {
            .expr => |e| e.canBeMoved(),
            .stmt => |s| switch (s.data) {
                .s_class => |class| class.class.canBeMoved(),
                .s_function => true,
                else => false,
            },
        };
    }
};

pub const Enum = struct {
    name: LocRef,
    arg: Ref,
    values: []EnumValue,
    is_export: bool,
};

pub const Namespace = struct {
    name: LocRef,
    arg: Ref,
    stmts: StmtNodeList,
    is_export: bool,
};

pub const Function = struct {
    func: G.Fn,
};

pub const Class = struct { class: G.Class, is_export: bool = false };

pub const If = struct {
    test_: ExprNodeIndex,
    yes: StmtNodeIndex,
    no: ?StmtNodeIndex,
};

pub const For = struct {
    // May be a SConst, SLet, SVar, or SExpr
    init: ?StmtNodeIndex = null,
    test_: ?ExprNodeIndex = null,
    update: ?ExprNodeIndex = null,
    body: StmtNodeIndex,
};

pub const ForIn = struct {
    // May be a SConst, SLet, SVar, or SExpr
    init: StmtNodeIndex,
    value: ExprNodeIndex,
    body: StmtNodeIndex,
};

pub const ForOf = struct {
    is_await: bool = false,
    // May be a SConst, SLet, SVar, or SExpr
    init: StmtNodeIndex,
    value: ExprNodeIndex,
    body: StmtNodeIndex,
};

pub const DoWhile = struct { body: StmtNodeIndex, test_: ExprNodeIndex };

pub const While = struct {
    test_: ExprNodeIndex,
    body: StmtNodeIndex,
};

pub const With = struct {
    value: ExprNodeIndex,
    body: StmtNodeIndex,
    body_loc: logger.Loc = logger.Loc.Empty,
};

pub const Try = struct {
    body_loc: logger.Loc,
    body: StmtNodeList,

    catch_: ?Catch = null,
    finally: ?Finally = null,
};

pub const Switch = struct {
    test_: ExprNodeIndex,
    body_loc: logger.Loc,
    cases: []Case,
};

// This object represents all of these types of import statements:
//
//   import 'path'
//   import {item1, item2} from 'path'
//   import * as ns from 'path'
//   import defaultItem, {item1, item2} from 'path'
//   import defaultItem, * as ns from 'path'
//
// Many parts are optional and can be combined in different ways. The only
// restriction is that you cannot have both a clause and a star namespace.
pub const Import = struct {
    // If this is a star import: This is a Ref for the namespace symbol. The Loc
    // for the symbol is StarLoc.
    //
    // Otherwise: This is an auto-generated Ref for the namespace representing
    // the imported file. In this case StarLoc is nil. The NamespaceRef is used
    // when converting this module to a CommonJS module.
    namespace_ref: Ref,
    default_name: ?LocRef = null,
    items: []ClauseItem = &.{},
    star_name_loc: ?logger.Loc = null,
    import_record_index: u32,
    is_single_line: bool = false,
};

pub const Return = struct { value: ?ExprNodeIndex = null };
pub const Throw = struct { value: ExprNodeIndex };

pub const Local = struct {
    kind: Kind = .k_var,
    decls: G.Decl.List = .{},
    is_export: bool = false,
    // The TypeScript compiler doesn't generate code for "import foo = bar"
    // statements where the import is never used.
    was_ts_import_equals: bool = false,

    was_commonjs_export: bool = false,

    pub fn canMergeWith(this: *const Local, other: *const Local) bool {
        return this.kind == other.kind and this.is_export == other.is_export and
            this.was_commonjs_export == other.was_commonjs_export;
    }

    pub const Kind = enum {
        k_var,
        k_let,
        k_const,
        k_using,
        k_await_using,

        pub fn jsonStringify(self: @This(), writer: anytype) !void {
            return try writer.write(@tagName(self));
        }

        pub fn isUsing(self: Kind) bool {
            return self == .k_using or self == .k_await_using;
        }

        pub fn isReassignable(kind: Kind) bool {
            return kind == .k_var or kind == .k_let;
        }
    };
};

pub const Break = struct {
    label: ?LocRef = null,
};

pub const Continue = struct {
    label: ?LocRef = null,
};

// @sortImports

const bun = @import("bun");
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.js_ast;
const Case = js_ast.Case;
const Catch = js_ast.Catch;
const ClauseItem = js_ast.ClauseItem;
const EnumValue = js_ast.EnumValue;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const Finally = js_ast.Finally;
const G = js_ast.G;
const LocRef = js_ast.LocRef;
const Ref = js_ast.Ref;
const StmtNodeIndex = js_ast.StmtNodeIndex;
const StmtNodeList = js_ast.StmtNodeList;
const StmtOrExpr = js_ast.StmtOrExpr;
src/ast/Scope.zig (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
pub const MemberHashMap = bun.StringHashMapUnmanaged(Member);
|
||||
|
||||
id: usize = 0,
|
||||
kind: Kind = Kind.block,
|
||||
parent: ?*Scope = null,
|
||||
children: BabyList(*Scope) = .{},
|
||||
members: MemberHashMap = .{},
|
||||
generated: BabyList(Ref) = .{},
|
||||
|
||||
// This is used to store the ref of the label symbol for ScopeLabel scopes.
|
||||
label_ref: ?Ref = null,
|
||||
label_stmt_is_loop: bool = false,
|
||||
|
||||
// If a scope contains a direct eval() expression, then none of the symbols
|
||||
// inside that scope can be renamed. We conservatively assume that the
|
||||
// evaluated code might reference anything that it has access to.
|
||||
contains_direct_eval: bool = false,
|
||||
|
||||
// This is to help forbid "arguments" inside class body scopes
|
||||
forbid_arguments: bool = false,
|
||||
|
||||
strict_mode: StrictModeKind = StrictModeKind.sloppy_mode,
|
||||
|
||||
is_after_const_local_prefix: bool = false,
|
||||
|
||||
// This will be non-null if this is a TypeScript "namespace" or "enum"
|
||||
ts_namespace: ?*TSNamespaceScope = null,
|
||||
|
||||
pub const NestedScopeMap = std.AutoArrayHashMap(u32, bun.BabyList(*Scope));
|
||||
|
||||
pub fn getMemberHash(name: []const u8) u64 {
|
||||
return bun.StringHashMapContext.hash(.{}, name);
|
||||
}
|
||||
|
||||
pub fn getMemberWithHash(this: *const Scope, name: []const u8, hash_value: u64) ?Member {
|
||||
const hashed = bun.StringHashMapContext.Prehashed{
|
||||
.value = hash_value,
|
||||
.input = name,
|
||||
};
|
||||
return this.members.getAdapted(name, hashed);
|
||||
}
|
||||
|
||||
pub fn getOrPutMemberWithHash(
|
||||
this: *Scope,
|
||||
allocator: std.mem.Allocator,
|
||||
name: []const u8,
|
||||
hash_value: u64,
|
||||
) !MemberHashMap.GetOrPutResult {
|
||||
const hashed = bun.StringHashMapContext.Prehashed{
|
||||
.value = hash_value,
|
||||
.input = name,
|
||||
};
|
||||
return this.members.getOrPutContextAdapted(allocator, name, hashed, .{});
|
||||
}

pub fn reset(this: *Scope) void {
    this.children.clearRetainingCapacity();
    this.generated.clearRetainingCapacity();
    this.members.clearRetainingCapacity();
    this.parent = null;
    this.id = 0;
    this.label_ref = null;
    this.label_stmt_is_loop = false;
    this.contains_direct_eval = false;
    this.strict_mode = .sloppy_mode;
    this.kind = .block;
}

// Do not make this a packed struct
// Two hours of debugging time lost to that.
// It causes a crash due to undefined memory
pub const Member = struct {
    ref: Ref,
    loc: logger.Loc,

    pub fn eql(a: Member, b: Member) bool {
        return @call(bun.callmod_inline, Ref.eql, .{ a.ref, b.ref }) and a.loc.start == b.loc.start;
    }
};

pub const SymbolMergeResult = enum {
    forbidden,
    replace_with_new,
    overwrite_with_new,
    keep_existing,
    become_private_get_set_pair,
    become_private_static_get_set_pair,
};

pub fn canMergeSymbols(
    scope: *Scope,
    existing: Symbol.Kind,
    new: Symbol.Kind,
    comptime is_typescript_enabled: bool,
) SymbolMergeResult {
    if (existing == .unbound) {
        return .replace_with_new;
    }

    if (comptime is_typescript_enabled) {
        // In TypeScript, imports are allowed to silently collide with symbols within
        // the module. Presumably this is because the imports may be type-only:
        //
        //   import {Foo} from 'bar'
        //   class Foo {}
        //
        if (existing == .import) {
            return .replace_with_new;
        }

        // "enum Foo {} enum Foo {}"
        // "namespace Foo { ... } enum Foo {}"
        if (new == .ts_enum and (existing == .ts_enum or existing == .ts_namespace)) {
            return .replace_with_new;
        }

        // "namespace Foo { ... } namespace Foo { ... }"
        // "function Foo() {} namespace Foo { ... }"
        // "enum Foo {} namespace Foo { ... }"
        if (new == .ts_namespace) {
            switch (existing) {
                .ts_namespace,
                .ts_enum,
                .hoisted_function,
                .generator_or_async_function,
                .class,
                => return .keep_existing,
                else => {},
            }
        }
    }

    // "var foo; var foo;"
    // "var foo; function foo() {}"
    // "function foo() {} var foo;"
    // "function *foo() {} function *foo() {}" but not "{ function *foo() {} function *foo() {} }"
    if (Symbol.isKindHoistedOrFunction(new) and
        Symbol.isKindHoistedOrFunction(existing) and
        (scope.kind == .entry or scope.kind == .function_body or scope.kind == .function_args or
            (new == existing and Symbol.isKindHoisted(existing))))
    {
        return .replace_with_new;
    }

    // "get #foo() {} set #foo() {}"
    // "set #foo() {} get #foo() {}"
    if ((existing == .private_get and new == .private_set) or
        (existing == .private_set and new == .private_get))
    {
        return .become_private_get_set_pair;
    }
    if ((existing == .private_static_get and new == .private_static_set) or
        (existing == .private_static_set and new == .private_static_get))
    {
        return .become_private_static_get_set_pair;
    }

    // "try {} catch (e) { var e }"
    if (existing == .catch_identifier and new == .hoisted) {
        return .replace_with_new;
    }

    // "function() { var arguments }"
    if (existing == .arguments and new == .hoisted) {
        return .keep_existing;
    }

    // "function() { let arguments }"
    if (existing == .arguments and new != .hoisted) {
        return .overwrite_with_new;
    }

    return .forbidden;
}
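
To make the commented cases concrete, here is a hedged sketch of the results a caller should expect, assuming a `scope` whose `kind` is `.function_body` and TypeScript enabled (illustrative expectations, not a test from the diff):

std.debug.assert(scope.canMergeSymbols(.hoisted, .hoisted, true) == .replace_with_new); // "var foo; var foo;"
std.debug.assert(scope.canMergeSymbols(.private_get, .private_set, true) == .become_private_get_set_pair);
std.debug.assert(scope.canMergeSymbols(.catch_identifier, .hoisted, true) == .replace_with_new); // "catch (e) { var e }"
std.debug.assert(scope.canMergeSymbols(.constant, .constant, true) == .forbidden); // redeclaring a const is an error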

pub const Kind = enum(u8) {
    block,
    with,
    label,
    class_name,
    class_body,
    catch_binding,

    // The scopes below stop hoisted variables from extending into parent scopes
    entry, // This is a module, TypeScript enum, or TypeScript namespace
    function_args,
    function_body,
    class_static_init,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }
};

pub fn recursiveSetStrictMode(s: *Scope, kind: StrictModeKind) void {
    if (s.strict_mode == .sloppy_mode) {
        s.strict_mode = kind;
        for (s.children.slice()) |child| {
            child.recursiveSetStrictMode(kind);
        }
    }
}

pub inline fn kindStopsHoisting(s: *const Scope) bool {
    return @intFromEnum(s.kind) >= @intFromEnum(Kind.entry);
}
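
`kindStopsHoisting` works only because every hoisting-boundary kind is declared at or after `entry` in the enum, so one integer comparison replaces a switch. A small comptime guard like the following (an illustrative addition, not part of the diff) would pin that ordering invariant down:

comptime {
    // Everything from `entry` onward must stop hoisting; everything before must not.
    std.debug.assert(@intFromEnum(Kind.entry) < @intFromEnum(Kind.function_args));
    std.debug.assert(@intFromEnum(Kind.catch_binding) < @intFromEnum(Kind.entry));
}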

// @sortImports

const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const logger = bun.logger;

const js_ast = bun.js_ast;
const Ref = js_ast.Ref;
const Scope = js_ast.Scope;
const StrictModeKind = js_ast.StrictModeKind;
const Symbol = js_ast.Symbol;
const TSNamespaceScope = js_ast.TSNamespaceScope;
const TypeScript = js_ast.TypeScript;

src/ast/ServerComponentBoundary.zig (new file, 123 lines)
@@ -0,0 +1,123 @@
//! Represents a boundary between client and server code. Every boundary
//! gets bundled twice, once for the desired target, and once to generate
//! a module of "references". Specifically, the generated file takes the
//! canonical Ast as input to derive a wrapper. See `Framework.ServerComponents`
//! for more details about this generated file.
//!
//! This is sometimes abbreviated as SCB.
use_directive: UseDirective,

/// The index of the original file.
source_index: Index.Int,

/// Index to the file imported on the opposite platform, which is
/// generated by the bundler. For client components, this is the
/// server's code. For server actions, this is the client's code.
reference_source_index: Index.Int,

/// When `bake.Framework.ServerComponents.separate_ssr_graph` is enabled, this
/// points to the separated module. When the SSR graph is not separate, this is
/// equal to `reference_source_index`.
//
// TODO: Is this used for server actions?
ssr_source_index: Index.Int,

/// The requirements for this data structure are reasonable lookup speed
/// and the ability to pull a `[]const Index.Int` of all boundaries for
/// iteration.
pub const List = struct {
    list: std.MultiArrayList(ServerComponentBoundary) = .{},
    /// Used to facilitate fast lookups into `items` by `.source_index`
    map: Map = .{},

    const Map = std.ArrayHashMapUnmanaged(void, void, struct {}, true);

    /// Can only be called on the bundler thread.
    pub fn put(
        m: *List,
        allocator: std.mem.Allocator,
        source_index: Index.Int,
        use_directive: UseDirective,
        reference_source_index: Index.Int,
        ssr_source_index: Index.Int,
    ) !void {
        try m.list.append(allocator, .{
            .source_index = source_index,
            .use_directive = use_directive,
            .reference_source_index = reference_source_index,
            .ssr_source_index = ssr_source_index,
        });
        const gop = try m.map.getOrPutAdapted(
            allocator,
            source_index,
            Adapter{ .list = m.list.slice() },
        );
        bun.assert(!gop.found_existing);
    }

    /// Can only be called on the bundler thread.
    pub fn getIndex(l: *const List, real_source_index: Index.Int) ?usize {
        return l.map.getIndexAdapted(
            real_source_index,
            Adapter{ .list = l.list.slice() },
        );
    }

    /// Use this to improve speed of accessing fields at the cost of
    /// storing more pointers. Invalidated when input is mutated.
    pub fn slice(l: List) Slice {
        return .{ .list = l.list.slice(), .map = l.map };
    }

    pub const Slice = struct {
        list: std.MultiArrayList(ServerComponentBoundary).Slice,
        map: Map,

        pub fn getIndex(l: *const Slice, real_source_index: Index.Int) ?usize {
            return l.map.getIndexAdapted(
                real_source_index,
                Adapter{ .list = l.list },
            ) orelse return null;
        }

        pub fn getReferenceSourceIndex(l: *const Slice, real_source_index: Index.Int) ?u32 {
            const i = l.map.getIndexAdapted(
                real_source_index,
                Adapter{ .list = l.list },
            ) orelse return null;
            bun.unsafeAssert(l.list.capacity > 0); // optimize MultiArrayList.Slice.items
            return l.list.items(.reference_source_index)[i];
        }

        pub fn bitSet(scbs: Slice, alloc: std.mem.Allocator, input_file_count: usize) !bun.bit_set.DynamicBitSetUnmanaged {
            var scb_bitset = try bun.bit_set.DynamicBitSetUnmanaged.initEmpty(alloc, input_file_count);
            for (scbs.list.items(.source_index)) |source_index| {
                scb_bitset.set(source_index);
            }
            return scb_bitset;
        }
    };

    pub const Adapter = struct {
        list: std.MultiArrayList(ServerComponentBoundary).Slice,

        pub fn hash(_: Adapter, key: Index.Int) u32 {
            return std.hash.uint32(key);
        }

        pub fn eql(adapt: Adapter, a: Index.Int, _: void, b_index: usize) bool {
            bun.unsafeAssert(adapt.list.capacity > 0); // optimize MultiArrayList.Slice.items
            return a == adapt.list.items(.source_index)[b_index];
        }
    };
};
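
The `Map` above is the adapted-hash-map trick: with `void` keys and values it stores only hash metadata and insertion order, while the real keys stay in the `MultiArrayList` and are recovered through `Adapter.eql` by index. A hedged usage sketch, where the `alloc` name, the index values, and the `.client` directive tag are assumptions for illustration:

var boundaries = ServerComponentBoundary.List{};
// Register source 42 with its generated reference module 43 and SSR module 44.
try boundaries.put(alloc, 42, .client, 43, 44);

const view = boundaries.slice(); // cache MultiArrayList pointers for repeated reads
if (view.getReferenceSourceIndex(42)) |ref_index| {
    std.debug.assert(ref_index == 43);
}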

// @sortImports

const bun = @import("bun");
const std = @import("std");

const js_ast = bun.js_ast;
const Index = js_ast.Index;
const ServerComponentBoundary = js_ast.ServerComponentBoundary;
const UseDirective = js_ast.UseDirective;

src/ast/Stmt.zig (new file, 424 lines)
@@ -0,0 +1,424 @@
loc: logger.Loc,
data: Data,

pub const Batcher = NewBatcher(Stmt);

pub fn assign(a: Expr, b: Expr) Stmt {
    return Stmt.alloc(
        S.SExpr,
        S.SExpr{
            .value = Expr.assign(a, b),
        },
        a.loc,
    );
}

const Serializable = struct {
    type: Tag,
    object: string,
    value: Data,
    loc: logger.Loc,
};

pub fn jsonStringify(self: *const Stmt, writer: anytype) !void {
    return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "stmt", .value = self.data, .loc = self.loc });
}

pub fn isTypeScript(self: *Stmt) bool {
    return @as(Stmt.Tag, self.data) == .s_type_script;
}

pub fn isSuperCall(self: Stmt) bool {
    return self.data == .s_expr and self.data.s_expr.value.data == .e_call and self.data.s_expr.value.data.e_call.target.data == .e_super;
}

pub fn isMissingExpr(self: Stmt) bool {
    return self.data == .s_expr and self.data.s_expr.value.data == .e_missing;
}

pub fn empty() Stmt {
    return Stmt{ .data = .{ .s_empty = None }, .loc = logger.Loc{} };
}

pub fn toEmpty(this: Stmt) Stmt {
    return .{
        .data = .{
            .s_empty = None,
        },
        .loc = this.loc,
    };
}

const None = S.Empty{};

pub var icount: usize = 0;
pub fn init(comptime StatementType: type, origData: *StatementType, loc: logger.Loc) Stmt {
    icount += 1;

    return switch (comptime StatementType) {
        S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } },
        S.Block => Stmt.comptime_init("s_block", S.Block, origData, loc),
        S.Break => Stmt.comptime_init("s_break", S.Break, origData, loc),
        S.Class => Stmt.comptime_init("s_class", S.Class, origData, loc),
        S.Comment => Stmt.comptime_init("s_comment", S.Comment, origData, loc),
        S.Continue => Stmt.comptime_init("s_continue", S.Continue, origData, loc),
        S.Debugger => Stmt.comptime_init("s_debugger", S.Debugger, origData, loc),
        S.Directive => Stmt.comptime_init("s_directive", S.Directive, origData, loc),
        S.DoWhile => Stmt.comptime_init("s_do_while", S.DoWhile, origData, loc),
        S.Enum => Stmt.comptime_init("s_enum", S.Enum, origData, loc),
        S.ExportClause => Stmt.comptime_init("s_export_clause", S.ExportClause, origData, loc),
        S.ExportDefault => Stmt.comptime_init("s_export_default", S.ExportDefault, origData, loc),
        S.ExportEquals => Stmt.comptime_init("s_export_equals", S.ExportEquals, origData, loc),
        S.ExportFrom => Stmt.comptime_init("s_export_from", S.ExportFrom, origData, loc),
        S.ExportStar => Stmt.comptime_init("s_export_star", S.ExportStar, origData, loc),
        S.SExpr => Stmt.comptime_init("s_expr", S.SExpr, origData, loc),
        S.ForIn => Stmt.comptime_init("s_for_in", S.ForIn, origData, loc),
        S.ForOf => Stmt.comptime_init("s_for_of", S.ForOf, origData, loc),
        S.For => Stmt.comptime_init("s_for", S.For, origData, loc),
        S.Function => Stmt.comptime_init("s_function", S.Function, origData, loc),
        S.If => Stmt.comptime_init("s_if", S.If, origData, loc),
        S.Import => Stmt.comptime_init("s_import", S.Import, origData, loc),
        S.Label => Stmt.comptime_init("s_label", S.Label, origData, loc),
        S.Local => Stmt.comptime_init("s_local", S.Local, origData, loc),
        S.Namespace => Stmt.comptime_init("s_namespace", S.Namespace, origData, loc),
        S.Return => Stmt.comptime_init("s_return", S.Return, origData, loc),
        S.Switch => Stmt.comptime_init("s_switch", S.Switch, origData, loc),
        S.Throw => Stmt.comptime_init("s_throw", S.Throw, origData, loc),
        S.Try => Stmt.comptime_init("s_try", S.Try, origData, loc),
        S.TypeScript => Stmt.comptime_init("s_type_script", S.TypeScript, origData, loc),
        S.While => Stmt.comptime_init("s_while", S.While, origData, loc),
        S.With => Stmt.comptime_init("s_with", S.With, origData, loc),
        else => @compileError("Invalid type in Stmt.init"),
    };
}
inline fn comptime_alloc(comptime tag_name: string, comptime typename: type, origData: anytype, loc: logger.Loc) Stmt {
    return Stmt{
        .loc = loc,
        .data = @unionInit(
            Data,
            tag_name,
            Data.Store.append(
                typename,
                origData,
            ),
        ),
    };
}

fn allocateData(allocator: std.mem.Allocator, comptime tag_name: string, comptime typename: type, origData: anytype, loc: logger.Loc) Stmt {
    const value = allocator.create(@TypeOf(origData)) catch unreachable;
    value.* = origData;

    return comptime_init(tag_name, *typename, value, loc);
}

inline fn comptime_init(comptime tag_name: string, comptime TypeName: type, origData: TypeName, loc: logger.Loc) Stmt {
    return Stmt{ .loc = loc, .data = @unionInit(Data, tag_name, origData) };
}

pub fn alloc(comptime StatementData: type, origData: StatementData, loc: logger.Loc) Stmt {
    Stmt.Data.Store.assert();

    icount += 1;
    return switch (StatementData) {
        S.Block => Stmt.comptime_alloc("s_block", S.Block, origData, loc),
        S.Break => Stmt.comptime_alloc("s_break", S.Break, origData, loc),
        S.Class => Stmt.comptime_alloc("s_class", S.Class, origData, loc),
        S.Comment => Stmt.comptime_alloc("s_comment", S.Comment, origData, loc),
        S.Continue => Stmt.comptime_alloc("s_continue", S.Continue, origData, loc),
        S.Debugger => Stmt{ .loc = loc, .data = .{ .s_debugger = origData } },
        S.Directive => Stmt.comptime_alloc("s_directive", S.Directive, origData, loc),
        S.DoWhile => Stmt.comptime_alloc("s_do_while", S.DoWhile, origData, loc),
        S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } },
        S.Enum => Stmt.comptime_alloc("s_enum", S.Enum, origData, loc),
        S.ExportClause => Stmt.comptime_alloc("s_export_clause", S.ExportClause, origData, loc),
        S.ExportDefault => Stmt.comptime_alloc("s_export_default", S.ExportDefault, origData, loc),
        S.ExportEquals => Stmt.comptime_alloc("s_export_equals", S.ExportEquals, origData, loc),
        S.ExportFrom => Stmt.comptime_alloc("s_export_from", S.ExportFrom, origData, loc),
        S.ExportStar => Stmt.comptime_alloc("s_export_star", S.ExportStar, origData, loc),
        S.SExpr => Stmt.comptime_alloc("s_expr", S.SExpr, origData, loc),
        S.ForIn => Stmt.comptime_alloc("s_for_in", S.ForIn, origData, loc),
        S.ForOf => Stmt.comptime_alloc("s_for_of", S.ForOf, origData, loc),
        S.For => Stmt.comptime_alloc("s_for", S.For, origData, loc),
        S.Function => Stmt.comptime_alloc("s_function", S.Function, origData, loc),
        S.If => Stmt.comptime_alloc("s_if", S.If, origData, loc),
        S.Import => Stmt.comptime_alloc("s_import", S.Import, origData, loc),
        S.Label => Stmt.comptime_alloc("s_label", S.Label, origData, loc),
        S.Local => Stmt.comptime_alloc("s_local", S.Local, origData, loc),
        S.Namespace => Stmt.comptime_alloc("s_namespace", S.Namespace, origData, loc),
        S.Return => Stmt.comptime_alloc("s_return", S.Return, origData, loc),
        S.Switch => Stmt.comptime_alloc("s_switch", S.Switch, origData, loc),
        S.Throw => Stmt.comptime_alloc("s_throw", S.Throw, origData, loc),
        S.Try => Stmt.comptime_alloc("s_try", S.Try, origData, loc),
        S.TypeScript => Stmt{ .loc = loc, .data = Data{ .s_type_script = S.TypeScript{} } },
        S.While => Stmt.comptime_alloc("s_while", S.While, origData, loc),
        S.With => Stmt.comptime_alloc("s_with", S.With, origData, loc),
        else => @compileError("Invalid type in Stmt.init"),
    };
}

pub const Disabler = bun.DebugOnlyDisabler(@This());

/// Use this when a Stmt.Data pointer must outlive the next call to reset().
/// Be careful to free the memory (or use an allocator that does it for you).
/// Also, prefer Stmt.init or Stmt.alloc when possible; this path is slower.
pub fn allocate(allocator: std.mem.Allocator, comptime StatementData: type, origData: StatementData, loc: logger.Loc) Stmt {
    Stmt.Data.Store.assert();

    icount += 1;
    return switch (StatementData) {
        S.Block => Stmt.allocateData(allocator, "s_block", S.Block, origData, loc),
        S.Break => Stmt.allocateData(allocator, "s_break", S.Break, origData, loc),
        S.Class => Stmt.allocateData(allocator, "s_class", S.Class, origData, loc),
        S.Comment => Stmt.allocateData(allocator, "s_comment", S.Comment, origData, loc),
        S.Continue => Stmt.allocateData(allocator, "s_continue", S.Continue, origData, loc),
        S.Debugger => Stmt{ .loc = loc, .data = .{ .s_debugger = origData } },
        S.Directive => Stmt.allocateData(allocator, "s_directive", S.Directive, origData, loc),
        S.DoWhile => Stmt.allocateData(allocator, "s_do_while", S.DoWhile, origData, loc),
        S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } },
        S.Enum => Stmt.allocateData(allocator, "s_enum", S.Enum, origData, loc),
        S.ExportClause => Stmt.allocateData(allocator, "s_export_clause", S.ExportClause, origData, loc),
        S.ExportDefault => Stmt.allocateData(allocator, "s_export_default", S.ExportDefault, origData, loc),
        S.ExportEquals => Stmt.allocateData(allocator, "s_export_equals", S.ExportEquals, origData, loc),
        S.ExportFrom => Stmt.allocateData(allocator, "s_export_from", S.ExportFrom, origData, loc),
        S.ExportStar => Stmt.allocateData(allocator, "s_export_star", S.ExportStar, origData, loc),
        S.SExpr => Stmt.allocateData(allocator, "s_expr", S.SExpr, origData, loc),
        S.ForIn => Stmt.allocateData(allocator, "s_for_in", S.ForIn, origData, loc),
        S.ForOf => Stmt.allocateData(allocator, "s_for_of", S.ForOf, origData, loc),
        S.For => Stmt.allocateData(allocator, "s_for", S.For, origData, loc),
        S.Function => Stmt.allocateData(allocator, "s_function", S.Function, origData, loc),
        S.If => Stmt.allocateData(allocator, "s_if", S.If, origData, loc),
        S.Import => Stmt.allocateData(allocator, "s_import", S.Import, origData, loc),
        S.Label => Stmt.allocateData(allocator, "s_label", S.Label, origData, loc),
        S.Local => Stmt.allocateData(allocator, "s_local", S.Local, origData, loc),
        S.Namespace => Stmt.allocateData(allocator, "s_namespace", S.Namespace, origData, loc),
        S.Return => Stmt.allocateData(allocator, "s_return", S.Return, origData, loc),
        S.Switch => Stmt.allocateData(allocator, "s_switch", S.Switch, origData, loc),
        S.Throw => Stmt.allocateData(allocator, "s_throw", S.Throw, origData, loc),
        S.Try => Stmt.allocateData(allocator, "s_try", S.Try, origData, loc),
        S.TypeScript => Stmt{ .loc = loc, .data = Data{ .s_type_script = S.TypeScript{} } },
        S.While => Stmt.allocateData(allocator, "s_while", S.While, origData, loc),
        S.With => Stmt.allocateData(allocator, "s_with", S.With, origData, loc),
        else => @compileError("Invalid type in Stmt.init"),
    };
}

pub fn allocateExpr(allocator: std.mem.Allocator, expr: Expr) Stmt {
    return Stmt.allocate(allocator, S.SExpr, S.SExpr{ .value = expr }, expr.loc);
}
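
Read the three constructors as three ownership regimes: `init` wraps a pointer the caller already owns, `alloc` copies the payload into the thread-local `Store` arena that is recycled per parse, and `allocate` copies into a caller-supplied allocator for data that must survive a `Store.reset()`. A sketch, assuming `value: Expr`, `loc`, and `allocator` are in hand:

// Arena-backed: freed wholesale when Data.Store.reset() runs.
const short_lived = Stmt.alloc(S.SExpr, .{ .value = value }, loc);

// Caller-owned: survives Store.reset(), but the caller must free it.
const long_lived = Stmt.allocate(allocator, S.SExpr, .{ .value = value }, loc);

// No copy at all: wraps a pointer whose lifetime the caller manages.
var data = S.SExpr{ .value = value };
const wrapped = Stmt.init(S.SExpr, &data, loc);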

pub const Tag = enum {
    s_block,
    s_break,
    s_class,
    s_comment,
    s_continue,
    s_directive,
    s_do_while,
    s_enum,
    s_export_clause,
    s_export_default,
    s_export_equals,
    s_export_from,
    s_export_star,
    s_expr,
    s_for_in,
    s_for_of,
    s_for,
    s_function,
    s_if,
    s_import,
    s_label,
    s_local,
    s_namespace,
    s_return,
    s_switch,
    s_throw,
    s_try,
    s_while,
    s_with,
    s_type_script,
    s_empty,
    s_debugger,
    s_lazy_export,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }

    pub fn isExportLike(tag: Tag) bool {
        return switch (tag) {
            .s_export_clause, .s_export_default, .s_export_equals, .s_export_from, .s_export_star, .s_empty => true,
            else => false,
        };
    }
};

pub const Data = union(Tag) {
    s_block: *S.Block,
    s_break: *S.Break,
    s_class: *S.Class,
    s_comment: *S.Comment,
    s_continue: *S.Continue,
    s_directive: *S.Directive,
    s_do_while: *S.DoWhile,
    s_enum: *S.Enum,
    s_export_clause: *S.ExportClause,
    s_export_default: *S.ExportDefault,
    s_export_equals: *S.ExportEquals,
    s_export_from: *S.ExportFrom,
    s_export_star: *S.ExportStar,
    s_expr: *S.SExpr,
    s_for_in: *S.ForIn,
    s_for_of: *S.ForOf,
    s_for: *S.For,
    s_function: *S.Function,
    s_if: *S.If,
    s_import: *S.Import,
    s_label: *S.Label,
    s_local: *S.Local,
    s_namespace: *S.Namespace,
    s_return: *S.Return,
    s_switch: *S.Switch,
    s_throw: *S.Throw,
    s_try: *S.Try,
    s_while: *S.While,
    s_with: *S.With,

    s_type_script: S.TypeScript,
    s_empty: S.Empty, // special case: it's a zero-value type
    s_debugger: S.Debugger,

    s_lazy_export: *Expr.Data,

    comptime {
        if (@sizeOf(Stmt) > 24) {
            @compileLog("Expected Stmt to be <= 24 bytes, but it is", @sizeOf(Stmt), " bytes");
        }
    }

    pub const Store = struct {
        const StoreType = NewStore(&.{
            S.Block,
            S.Break,
            S.Class,
            S.Comment,
            S.Continue,
            S.Directive,
            S.DoWhile,
            S.Enum,
            S.ExportClause,
            S.ExportDefault,
            S.ExportEquals,
            S.ExportFrom,
            S.ExportStar,
            S.SExpr,
            S.ForIn,
            S.ForOf,
            S.For,
            S.Function,
            S.If,
            S.Import,
            S.Label,
            S.Local,
            S.Namespace,
            S.Return,
            S.Switch,
            S.Throw,
            S.Try,
            S.While,
            S.With,
        }, 128);

        pub threadlocal var instance: ?*StoreType = null;
        pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null;
        pub threadlocal var disable_reset = false;

        pub fn create() void {
            if (instance != null or memory_allocator != null) {
                return;
            }

            instance = StoreType.init();
        }

        /// create || reset
        pub fn begin() void {
            if (memory_allocator != null) return;
            if (instance == null) {
                create();
                return;
            }

            if (!disable_reset)
                instance.?.reset();
        }

        pub fn reset() void {
            if (disable_reset or memory_allocator != null) return;
            instance.?.reset();
        }

        pub fn deinit() void {
            if (instance == null or memory_allocator != null) return;
            instance.?.deinit();
            instance = null;
        }

        pub inline fn assert() void {
            if (comptime Environment.allow_assert) {
                if (instance == null and memory_allocator == null)
                    bun.unreachablePanic("Store must be init'd", .{});
            }
        }

        pub fn append(comptime T: type, value: T) *T {
            if (memory_allocator) |allocator| {
                return allocator.append(T, value);
            }

            Disabler.assert();
            return instance.?.append(T, value);
        }
    };
};
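
Two design points worth calling out. First, almost every `Data` payload is a pointer, which is what keeps `@sizeOf(Stmt)` inside the 24-byte budget checked above: the union collapses to a tag plus one pointer sitting next to the small `loc`. Second, those pointed-to payloads come out of the thread-local `Store`, a batch arena reset between files, so individual statements are never freed one by one. A sketch of what the assertion is really guarding (an illustrative addition, assuming a 64-bit target):

comptime {
    // Pointer payloads keep the union one word wide no matter how large
    // S.Function, S.Class, etc. grow.
    std.debug.assert(@sizeOf(*S.Function) == @sizeOf(usize));
    // Tag + pointer union + loc must fit the 24-byte budget.
    std.debug.assert(@sizeOf(Stmt) <= 24);
}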

pub fn StoredData(tag: Tag) type {
    const T = @FieldType(Data, tag);
    return switch (@typeInfo(T)) {
        .pointer => |ptr| ptr.child,
        else => T,
    };
}

pub fn caresAboutScope(self: *Stmt) bool {
    return switch (self.data) {
        .s_block, .s_empty, .s_debugger, .s_expr, .s_if, .s_for, .s_for_in, .s_for_of, .s_do_while, .s_while, .s_with, .s_try, .s_switch, .s_return, .s_throw, .s_break, .s_continue, .s_directive => {
            return false;
        },

        .s_local => |local| {
            return local.kind != .k_var;
        },
        else => {
            return true;
        },
    };
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const Environment = bun.Environment;
const logger = bun.logger;
const string = bun.string;

const js_ast = bun.js_ast;
const ASTMemoryAllocator = js_ast.ASTMemoryAllocator;
const Expr = js_ast.Expr;
const NewBatcher = js_ast.NewBatcher;
const NewStore = js_ast.NewStore;
const S = js_ast.S;
const Stmt = js_ast.Stmt;

src/ast/Symbol.zig (new file, 489 lines)
@@ -0,0 +1,489 @@
/// This is the name that came from the parser. Printed names may be renamed
/// during minification or to avoid name collisions. Do not use the original
/// name during printing.
original_name: []const u8,

/// This is used for symbols that represent items in the import clause of an
/// ES6 import statement. These should always be referenced by EImportIdentifier
/// instead of an EIdentifier. When this is present, the expression should
/// be printed as a property access off the namespace instead of as a bare
/// identifier.
///
/// For correctness, this must be stored on the symbol instead of indirectly
/// associated with the Ref for the symbol somehow. In ES6 "flat bundling"
/// mode, re-exported symbols are collapsed using MergeSymbols() and renamed
/// symbols from other files that end up at this symbol must be able to tell
/// if it has a namespace alias.
namespace_alias: ?G.NamespaceAlias = null,

/// Used by the parser for single pass parsing.
link: Ref = Ref.None,

/// An estimate of the number of uses of this symbol. This is used to detect
/// whether a symbol is used or not. For example, TypeScript imports that are
/// unused must be removed because they are probably type-only imports. This
/// is an estimate and may not be completely accurate due to oversights in the
/// code. But it should always be non-zero when the symbol is used.
use_count_estimate: u32 = 0,

/// This is for generating cross-chunk imports and exports for code splitting.
///
/// Do not use this directly. Use `chunkIndex()` instead.
chunk_index: u32 = invalid_chunk_index,

/// This is used for minification. Symbols that are declared in sibling scopes
/// can share a name. A good heuristic (from Google Closure Compiler) is to
/// assign names to symbols from sibling scopes in declaration order. That way
/// local variable names are reused in each global function like this, which
/// improves gzip compression:
///
///   function x(a, b) { ... }
///   function y(a, b, c) { ... }
///
/// The parser fills this in for symbols inside nested scopes. There are three
/// slot namespaces: regular symbols, label symbols, and private symbols.
///
/// Do not use this directly. Use `nestedScopeSlot()` instead.
nested_scope_slot: u32 = invalid_nested_scope_slot,

did_keep_name: bool = true,

must_start_with_capital_letter_for_jsx: bool = false,

/// The kind of symbol. This is used to determine how to print the symbol
/// and how to deal with conflicts, renaming, etc.
kind: Kind = Kind.other,

/// Certain symbols must not be renamed or minified. For example, the
/// "arguments" variable is declared by the runtime for every function.
/// Renaming can also break any identifier used inside a "with" statement.
must_not_be_renamed: bool = false,

/// We automatically generate import items for property accesses off of
/// namespace imports. This lets us remove the expensive namespace imports
/// while bundling in many cases, replacing them with a cheap import item
/// instead:
///
///   import * as ns from 'path'
///   ns.foo()
///
/// That can often be replaced by this, which avoids needing the namespace:
///
///   import {foo} from 'path'
///   foo()
///
/// However, if the import is actually missing then we don't want to report a
/// compile-time error like we do for real import items. This status lets us
/// avoid this. We also need to be able to replace such import items with
/// undefined, which this status is also used for.
import_item_status: ImportItemStatus = ImportItemStatus.none,

/// --- Not actually used yet -----------------------------------------------
/// Sometimes we lower private symbols even if they are supported. For example,
/// consider the following TypeScript code:
///
///   class Foo {
///     #foo = 123
///     bar = this.#foo
///   }
///
/// If "useDefineForClassFields: false" is set in "tsconfig.json", then "bar"
/// must use assignment semantics instead of define semantics. We can compile
/// that to this code:
///
///   class Foo {
///     constructor() {
///       this.#foo = 123;
///       this.bar = this.#foo;
///     }
///     #foo;
///   }
///
/// However, we can't do the same for static fields:
///
///   class Foo {
///     static #foo = 123
///     static bar = this.#foo
///   }
///
/// Compiling these static fields to something like this would be invalid:
///
///   class Foo {
///     static #foo;
///   }
///   Foo.#foo = 123;
///   Foo.bar = Foo.#foo;
///
/// Thus "#foo" must be lowered even though it's supported. Another case is
/// when we're converting top-level class declarations to class expressions
/// to avoid the TDZ and the class shadowing symbol is referenced within the
/// class body:
///
///   class Foo {
///     static #foo = Foo
///   }
///
/// This cannot be converted into something like this:
///
///   var Foo = class {
///     static #foo;
///   };
///   Foo.#foo = Foo;
///
/// --- Not actually used yet -----------------------------------------------
private_symbol_must_be_lowered: bool = false,

remove_overwritten_function_declaration: bool = false,

/// Used in HMR to decide when live binding code is needed.
has_been_assigned_to: bool = false,

comptime {
    bun.assert_eql(@sizeOf(Symbol), 88);
    bun.assert_eql(@alignOf(Symbol), @alignOf([]const u8));
}

const invalid_chunk_index = std.math.maxInt(u32);
pub const invalid_nested_scope_slot = std.math.maxInt(u32);

pub const SlotNamespace = enum {
    must_not_be_renamed,
    default,
    label,
    private_name,
    mangled_prop,

    pub const CountsArray = std.EnumArray(SlotNamespace, u32);
};

/// This is for generating cross-chunk imports and exports for code splitting.
pub inline fn chunkIndex(this: *const Symbol) ?u32 {
    const i = this.chunk_index;
    return if (i == invalid_chunk_index) null else i;
}

pub inline fn nestedScopeSlot(this: *const Symbol) ?u32 {
    const i = this.nested_scope_slot;
    return if (i == invalid_nested_scope_slot) null else i;
}
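
`chunk_index` and `nested_scope_slot` use `maxInt(u32)` as an in-band null rather than `?u32`, which would pad the struct past the 88-byte size asserted above; the inline getters convert the sentinel back into an optional at the call site. Usage looks like this (a sketch, with `symbol` assumed in hand):

if (symbol.chunkIndex()) |chunk| {
    // The symbol was assigned to a chunk by code splitting.
    _ = chunk;
} else {
    // chunk_index still holds the maxInt(u32) sentinel.
}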

pub fn slotNamespace(this: *const Symbol) SlotNamespace {
    const kind = this.kind;

    if (kind == .unbound or this.must_not_be_renamed) {
        return .must_not_be_renamed;
    }

    if (kind.isPrivate()) {
        return .private_name;
    }

    return switch (kind) {
        // .mangled_prop => .mangled_prop,
        .label => .label,
        else => .default,
    };
}

pub inline fn hasLink(this: *const Symbol) bool {
    return this.link.tag != .invalid;
}

pub const Kind = enum {
    /// An unbound symbol is one that isn't declared in the file it's referenced
    /// in. For example, using "window" without declaring it will be unbound.
    unbound,

    /// This has special merging behavior. You're allowed to re-declare these
    /// symbols more than once in the same scope. These symbols are also hoisted
    /// out of the scope they are declared in to the closest containing function
    /// or module scope. These are the symbols with this kind:
    ///
    /// - Function arguments
    /// - Function statements
    /// - Variables declared using "var"
    hoisted,
    hoisted_function,

    /// There's a weird special case where catch variables declared using a simple
    /// identifier (i.e. not a binding pattern) block hoisted variables instead of
    /// becoming an error:
    ///
    ///   var e = 0;
    ///   try { throw 1 } catch (e) {
    ///     print(e) // 1
    ///     var e = 2
    ///     print(e) // 2
    ///   }
    ///   print(e) // 0 (since the hoisting stops at the catch block boundary)
    ///
    /// However, other forms are still a syntax error:
    ///
    ///   try {} catch (e) { let e }
    ///   try {} catch ({e}) { var e }
    ///
    /// This symbol is for handling this weird special case.
    catch_identifier,

    /// Generator and async functions are not hoisted, but still have special
    /// properties such as being able to overwrite previous functions with the
    /// same name
    generator_or_async_function,

    /// This is the special "arguments" variable inside functions
    arguments,

    /// Classes can merge with TypeScript namespaces.
    class,

    /// A class-private identifier (i.e. "#foo").
    private_field,
    private_method,
    private_get,
    private_set,
    private_get_set_pair,
    private_static_field,
    private_static_method,
    private_static_get,
    private_static_set,
    private_static_get_set_pair,

    /// Labels are in their own namespace
    label,

    /// TypeScript enums can merge with TypeScript namespaces and other TypeScript
    /// enums.
    ts_enum,

    /// TypeScript namespaces can merge with classes, functions, TypeScript enums,
    /// and other TypeScript namespaces.
    ts_namespace,

    /// In TypeScript, imports are allowed to silently collide with symbols within
    /// the module. Presumably this is because the imports may be type-only.
    /// Import statement namespace references should NOT have this set.
    import,

    /// Assigning to a "const" symbol will throw a TypeError at runtime
    constant,

    // CSS identifiers that are renamed to be unique to the file they are in
    local_css,

    /// This annotates all other symbols that don't have special behavior.
    other,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
        return try writer.write(@tagName(self));
    }

    pub inline fn isPrivate(kind: Symbol.Kind) bool {
        return @intFromEnum(kind) >= @intFromEnum(Symbol.Kind.private_field) and @intFromEnum(kind) <= @intFromEnum(Symbol.Kind.private_static_get_set_pair);
    }

    pub inline fn isHoisted(kind: Symbol.Kind) bool {
        return switch (kind) {
            .hoisted, .hoisted_function => true,
            else => false,
        };
    }

    pub inline fn isHoistedOrFunction(kind: Symbol.Kind) bool {
        return switch (kind) {
            .hoisted, .hoisted_function, .generator_or_async_function => true,
            else => false,
        };
    }

    pub inline fn isFunction(kind: Symbol.Kind) bool {
        return switch (kind) {
            .hoisted_function, .generator_or_async_function => true,
            else => false,
        };
    }
};
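
Like `Scope.kindStopsHoisting`, `isPrivate` is a range check over declaration order: it assumes the private kinds form one contiguous run from `private_field` through `private_static_get_set_pair`. A comptime guard along these lines (illustrative, not in the diff) would turn an accidental reordering into a compile error:

comptime {
    const first = @intFromEnum(Kind.private_field);
    const last = @intFromEnum(Kind.private_static_get_set_pair);
    // Ten private kinds, declared back to back.
    std.debug.assert(last - first + 1 == 10);
}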

pub const Use = struct {
    count_estimate: u32 = 0,
};

pub const List = BabyList(Symbol);
pub const NestedList = BabyList(List);

pub fn mergeContentsWith(this: *Symbol, old: *Symbol) void {
    this.use_count_estimate += old.use_count_estimate;
    if (old.must_not_be_renamed) {
        this.original_name = old.original_name;
        this.must_not_be_renamed = true;
    }

    // TODO: MustStartWithCapitalLetterForJSX
}

pub const Map = struct {
    // This could be represented as a "map[Ref]Symbol" but a two-level array was
    // more efficient in profiles. This appears to be because it doesn't involve
    // a hash. This representation also makes it trivial to quickly merge symbol
    // maps from multiple files together. Each file only generates symbols in a
    // single inner array, so you can join the maps together by just making a
    // single outer array containing all of the inner arrays. See the comment on
    // "Ref" for more detail.
    symbols_for_source: NestedList = .{},

    pub fn dump(this: Map) void {
        defer Output.flush();
        for (this.symbols_for_source.slice(), 0..) |symbols, i| {
            Output.prettyln("\n\n-- Source ID: {d} ({d} symbols) --\n\n", .{ i, symbols.len });
            for (symbols.slice(), 0..) |symbol, inner_index| {
                Output.prettyln(
                    " name: {s}\n tag: {s}\n {any}\n",
                    .{
                        symbol.original_name, @tagName(symbol.kind),
                        if (symbol.hasLink()) symbol.link else Ref{
                            .source_index = @truncate(i),
                            .inner_index = @truncate(inner_index),
                            .tag = .symbol,
                        },
                    },
                );
            }
        }
    }

    pub fn assignChunkIndex(this: *Map, decls_: DeclaredSymbol.List, chunk_index: u32) void {
        const Iterator = struct {
            map: *Map,
            chunk_index: u32,

            pub fn next(self: @This(), ref: Ref) void {
                var symbol = self.map.get(ref).?;
                symbol.chunk_index = self.chunk_index;
            }
        };
        var decls = decls_;

        DeclaredSymbol.forEachTopLevelSymbol(&decls, Iterator{ .map = this, .chunk_index = chunk_index }, Iterator.next);
    }

    pub fn merge(this: *Map, old: Ref, new: Ref) Ref {
        if (old.eql(new)) {
            return new;
        }

        var old_symbol = this.get(old).?;
        if (old_symbol.hasLink()) {
            const old_link = old_symbol.link;
            old_symbol.link = this.merge(old_link, new);
            return old_symbol.link;
        }

        var new_symbol = this.get(new).?;

        if (new_symbol.hasLink()) {
            const new_link = new_symbol.link;
            new_symbol.link = this.merge(old, new_link);
            return new_symbol.link;
        }

        old_symbol.link = new;
        new_symbol.mergeContentsWith(old_symbol);
        return new;
    }

    pub fn get(self: *const Map, ref: Ref) ?*Symbol {
        if (Ref.isSourceIndexNull(ref.sourceIndex()) or ref.isSourceContentsSlice()) {
            return null;
        }

        return self.symbols_for_source.at(ref.sourceIndex()).mut(ref.innerIndex());
    }

    pub fn getConst(self: *const Map, ref: Ref) ?*const Symbol {
        if (Ref.isSourceIndexNull(ref.sourceIndex()) or ref.isSourceContentsSlice()) {
            return null;
        }

        return self.symbols_for_source.at(ref.sourceIndex()).at(ref.innerIndex());
    }

    pub fn init(sourceCount: usize, allocator: std.mem.Allocator) !Map {
        const symbols_for_source: NestedList = NestedList.init(try allocator.alloc([]Symbol, sourceCount));
        return Map{ .symbols_for_source = symbols_for_source };
    }

    pub fn initWithOneList(list: List) Map {
        const baby_list = BabyList(List).init((&list)[0..1]);
        return initList(baby_list);
    }

    pub fn initList(list: NestedList) Map {
        return Map{ .symbols_for_source = list };
    }

    pub fn getWithLink(symbols: *const Map, ref: Ref) ?*Symbol {
        var symbol: *Symbol = symbols.get(ref) orelse return null;
        if (symbol.hasLink()) {
            return symbols.get(symbol.link) orelse symbol;
        }
        return symbol;
    }

    pub fn getWithLinkConst(symbols: *Map, ref: Ref) ?*const Symbol {
        var symbol: *const Symbol = symbols.getConst(ref) orelse return null;
        if (symbol.hasLink()) {
            return symbols.getConst(symbol.link) orelse symbol;
        }
        return symbol;
    }

    pub fn followAll(symbols: *Map) void {
        const trace = bun.perf.trace("Symbols.followAll");
        defer trace.end();
        for (symbols.symbols_for_source.slice()) |list| {
            for (list.slice()) |*symbol| {
                if (!symbol.hasLink()) continue;
                symbol.link = follow(symbols, symbol.link);
            }
        }
    }

    /// Equivalent to followSymbols in esbuild
    pub fn follow(symbols: *const Map, ref: Ref) Ref {
        var symbol = symbols.get(ref) orelse return ref;
        if (!symbol.hasLink()) {
            return ref;
        }

        const link = follow(symbols, symbol.link);

        if (!symbol.link.eql(link)) {
            symbol.link = link;
        }

        return link;
    }
};
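
`merge` and `follow` together form a union-find over `Ref`s: `merge` splices one symbol's `link` chain onto another, and `follow` resolves a `Ref` to its final target while compressing the path so repeated lookups stay cheap; `followAll` then flattens every chain once at the end. A hedged sketch of the effect, with `map`, `a`, `b`, and `c` assumed to be an initialized `Symbol.Map` and three valid `Ref`s:

// After merging a -> b and b -> c, every ref in the chain resolves to c,
// and follow() rewrites intermediate links to point at c directly.
_ = map.merge(a, b);
_ = map.merge(b, c);
const root = map.follow(a);
std.debug.assert(root.eql(c));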

pub inline fn isHoisted(self: *const Symbol) bool {
    return Symbol.isKindHoisted(self.kind);
}

// @sortImports

const std = @import("std");

const bun = @import("bun");
const BabyList = bun.BabyList;
const Output = bun.Output;

const js_ast = bun.js_ast;
const DeclaredSymbol = js_ast.DeclaredSymbol;
const G = js_ast.G;
const ImportItemStatus = js_ast.ImportItemStatus;
const Ref = js_ast.Ref;
const Symbol = js_ast.Symbol;

pub const isKindFunction = Symbol.Kind.isFunction;
pub const isKindHoisted = Symbol.Kind.isHoisted;
pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction;
pub const isKindPrivate = Symbol.Kind.isPrivate;

src/ast/TS.zig (new file, 141 lines)
@@ -0,0 +1,141 @@
/// This is for TypeScript "enum" and "namespace" blocks. Each block can
/// potentially be instantiated multiple times. The exported members of each
/// block are merged into a single namespace while the non-exported code is
/// still scoped to just within that block:
///
///   let x = 1;
///   namespace Foo {
///     let x = 2;
///     export let y = 3;
///   }
///   namespace Foo {
///     console.log(x); // 1
///     console.log(y); // 3
///   }
///
/// Doing this also works inside an enum:
///
///   enum Foo {
///     A = 3,
///     B = A + 1,
///   }
///   enum Foo {
///     C = A + 2,
///   }
///   console.log(Foo.B) // 4
///   console.log(Foo.C) // 5
///
/// This is a form of identifier lookup that works differently than the
/// hierarchical scope-based identifier lookup in JavaScript. Lookup now needs
/// to search sibling scopes in addition to parent scopes. This is accomplished
/// by sharing the map of exported members between all matching sibling scopes.
pub const TSNamespaceScope = struct {
    /// This is specific to this namespace block. It's the argument of the
    /// immediately-invoked function expression that the namespace block is
    /// compiled into:
    ///
    ///   var ns;
    ///   (function (ns2) {
    ///     ns2.x = 123;
    ///   })(ns || (ns = {}));
    ///
    /// This variable is "ns2" in the above example. It's the symbol to use when
    /// generating property accesses off of this namespace when it's in scope.
    arg_ref: Ref,

    /// This is shared between all sibling namespace blocks
    exported_members: *TSNamespaceMemberMap,

    /// This is a lazily-generated map of identifiers that actually represent
    /// property accesses to this namespace's properties. For example:
    ///
    ///   namespace x {
    ///     export let y = 123
    ///   }
    ///   namespace x {
    ///     export let z = y
    ///   }
    ///
    /// This should be compiled into the following code:
    ///
    ///   var x;
    ///   (function(x2) {
    ///     x2.y = 123;
    ///   })(x || (x = {}));
    ///   (function(x3) {
    ///     x3.z = x3.y;
    ///   })(x || (x = {}));
    ///
    /// When we try to find the symbol "y", we instead return one of these lazily
    /// generated proxy symbols that represent the property access "x3.y". This
    /// map is unique per namespace block because "x3" is the argument symbol that
    /// is specific to that particular namespace block.
    property_accesses: bun.StringArrayHashMapUnmanaged(Ref) = .{},

    /// Even though enums are like namespaces and both enums and namespaces allow
    /// implicit references to properties of sibling scopes, they behave like
    /// separate, er, namespaces. Implicit references only work namespace-to-
    /// namespace and enum-to-enum. They do not work enum-to-namespace. And I'm
    /// not sure what's supposed to happen for the namespace-to-enum case because
    /// the compiler crashes: https://github.com/microsoft/TypeScript/issues/46891.
    /// So basically these both work:
    ///
    ///   enum a { b = 1 }
    ///   enum a { c = b }
    ///
    ///   namespace x { export let y = 1 }
    ///   namespace x { export let z = y }
    ///
    /// This doesn't work:
    ///
    ///   enum a { b = 1 }
    ///   namespace a { export let c = b }
    ///
    /// And this crashes the TypeScript compiler:
    ///
    ///   namespace a { export let b = 1 }
    ///   enum a { c = b }
    ///
    /// Therefore we only allow enum/enum and namespace/namespace interactions.
    is_enum_scope: bool,
};

pub const TSNamespaceMemberMap = bun.StringArrayHashMapUnmanaged(TSNamespaceMember);

pub const TSNamespaceMember = struct {
    loc: logger.Loc,
    data: Data,

    pub const Data = union(enum) {
        /// "namespace ns { export let it }"
        property,
        /// "namespace ns { export namespace it {} }"
        namespace: *TSNamespaceMemberMap,
        /// "enum ns { it }"
        enum_number: f64,
        /// "enum ns { it = 'it' }"
        enum_string: *E.String,
        /// "enum ns { it = something() }"
        enum_property: void,

        pub fn isEnum(data: Data) bool {
            return switch (data) {
                inline else => |_, tag| comptime std.mem.startsWith(u8, @tagName(tag), "enum_"),
            };
        }
    };
};
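
`isEnum` shows the `inline else` capture trick: the switch is monomorphized per tag, so `@tagName(tag)` is a comptime-known string and the `enum_` prefix test costs nothing at runtime. The same shape works for any tag-name predicate, e.g. this illustrative sketch (not part of the diff):

// Comptime per-tag dispatch: each branch is compiled with `tag` known.
fn isNamespaceLike(data: TSNamespaceMember.Data) bool {
    return switch (data) {
        inline else => |_, tag| comptime std.mem.eql(u8, @tagName(tag), "namespace"),
    };
}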

// @sortImports

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;

const js_ast = bun.js_ast;
const E = js_ast.E;
const Ref = js_ast.Ref;

const G = js_ast.G;
pub const Class = G.Class;
Some files were not shown because too many files have changed in this diff.