Mirror of https://github.com/oven-sh/bun (synced 2026-02-02 23:18:47 +00:00)

Compare commits: dylan/fix-… ... cursor/add… (206 commits)
Commit SHAs in this compare (author, message, and date columns did not survive the mirror):

e8192578f0, f7b221b3bd, 26f7fcbbb0, 91e3076ec7, f62940bbda, c82345c0a0, 817d0464f6, a5bb525614, 4cb7910e32, d7970946eb,
014fb6be8f, 5c7991b707, da5fc817d1, 407c4e800a, 11070b8e16, adfdaab4fd, bfd7fc06c7, bd3abc5a2a, 193193024f, 6edc3a9900,
1bd44e9ce7, c7327d62c2, 90dda8219f, 885979644d, 13c5b0d9cb, d6e45afef9, 300aedd9cc, d9cf836b67, 293215778f, 95346bd919,
ceaaed4848, abaa69183b, 3e1075410b, 7a88bb0e1c, 7a790581e0, d5cc530024, d7548325b1, d11fd94cdb, 4cbd040485, 773484a628,
71c14fac7b, b2a728e45d, 390798c172, 284de53f26, 5a025abddf, 4ab4b1b131, 13ea970852, ba78d5b2c3, ce8767cdc8, 082a9cb59c,
3c37f25b65, a079743a02, e0852fd651, 6bbd1e0685, 4534f6e635, c62a7a77a3, ecf5ea389f, 010ef4d119, 4d77cd53f1, 3cf353b755,
fd894f5a65, a9969b7db2, 27a08fca84, a398bd62a3, 2aa7c59727, 7765b61038, 8a06ddb1fb, 2e76e69939, aa404b14c4, a4819b41e9,
f5bfda9699, 9f5adfefe3, 316c8d6c48, da87890532, 576f66c149, cd0756c95c, c92f3f7b72, f1226c9767, b111e6db02, ffffb634c6,
d109183d3e, 14c9165d6f, c42539b0bf, 022a567af0, cfb8956ac5, 2bb36ca6b4, 24b3de1bc3, b01ffe6da8, 579f2ecd51, 627b0010e0,
3369e25a70, 06a40f0b29, 7989352b39, e1ab6fe36b, 14f59568cc, 1855836259, c85cf136a5, 4da85ac9c1, 9248d81871, ba21d6d54b,
32985591eb, 544d399980, 809992229f, 9a0624bd99, ec2c2281cf, df017990aa, bf02d04479, 5910504aeb, 8759527feb, 7b4b299be0,
ff8c2dcbc4, a275ed654b, 7b164ee9de, fc92921a4a, 44d04968cd, e6ab636313, 325d0b1ed6, a8e4489e10, 31980bc151, e58df65a75,
6317d6498f, 9e61b70535, 58c1372b50, 88840dcafa, 793a9752c9, 8f08e84c1e, 3605531e34, 7dc58e0ce4, 15a58cca1c, a3fdfeb924,
c024e73e6a, 392212b090, 3ea6133c46, 5d84f8a102, 9e329ee605, 76f6574729, 50b938561a, 3b75095f0c, 7b127c946d, b9a63893fe,
ff1a35668f, b36b4b2888, e7e5528632, 9a5ff02420, 4e9ee08a4a, e11ac9d1b8, e9414966ca, b2ae98865b, e8ed50cd9a, 9dd799d2e6,
ba28eeece6, e9f908fcbf, 654472f217, 5dcf99424c, ae91711010, ca6ba0fa2d, 3195df8796, 9d1eace981, 8e80afbce1, efb6b823c9,
6d348fa759, 69be630aea, bca833ad59, ef9ea8ae1c, a844957eb3, 573927c4bf, 3e97c1caf3, b4450db807, 6a363a38da, ffa286ef70,
2fc8785868, 8ddb92085b, 4ca83be84f, 8aae534270, 98ee30eccf, 562a65037d, beb1db967b, 0efbb29581, 0e883c935c, 497360d543,
e23491391b, 259bf47abd, d1ac52da2c, 1ebec90d6e, f1504c4265, 21f238a827, 33be08bde8, 67b64c3334, bfd12eeeba, 004ee11bed,
457c15e424, 815182799e, a5cb42c407, 0dade44a37, 09d3de918f, 9e13a93215, 2c5e9e5532, 9a392b39e2, 11ed29068f, ea7b9ea976,
9bee7a64a2, ea6f6dff7f, 342fe232d0, 89c5e40544, 95af099a0c, 8686361f4f
.agent/agent.mjs (new file, 78 lines)

@@ -0,0 +1,78 @@
```js
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }

  process.exit(status || 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
```
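For reference, the prompt the wrapper hands to `claude` is assembled in a fixed order: a `## GitHub Environment` block (when any `GITHUB_*` variables are set), then the prompt file, then an `## Additional Arguments` section built from the extra CLI arguments. Below is a small paraphrase of that composition as a standalone function; it is illustrative only, and the real logic is the script above.

```js
// Paraphrase of the prompt assembly in .agent/agent.mjs (not a separate API).
function composePrompt(promptFileText, env, extraArgs) {
  const githubEnvs = Object.entries(env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  let prompt = promptFileText;
  if (githubEnvs.length > 0) {
    // GitHub context is prepended so it comes before the prompt body.
    const block = githubEnvs.map(([k, v]) => `**${k}**: \`${v}\``).join("\n");
    prompt = `## GitHub Environment\n\n${block}\n\n---\n\n` + prompt;
  }
  if (extraArgs.length > 0) {
    // Extra CLI arguments are appended after the prompt body.
    prompt += `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
  }
  return prompt;
}

// Example (hypothetical values):
// composePrompt("Triage this issue.", { GITHUB_REPOSITORY: "oven-sh/bun" }, ["fix", "auth"]);
```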
Hunks from the Buildkite pipeline generator script (its file header was not captured):

```
@@ -228,13 +228,7 @@ function getRetry(limit = 0) {
    manual: {
      permit_on_passed: true,
    },
    automatic: [
      { exit_status: 1, limit },
      { exit_status: -1, limit: 1 },
      { exit_status: 255, limit: 1 },
      { signal_reason: "cancel", limit: 1 },
      { signal_reason: "agent_stop", limit: 1 },
    ],
    automatic: false,
  };
}

@@ -315,6 +309,19 @@ function getCppAgent(platform, options) {
  });
}

/**
 * @returns {Platform}
 */
function getZigPlatform() {
  return {
    os: "linux",
    arch: "aarch64",
    abi: "musl",
    distro: "alpine",
    release: "3.21",
  };
}

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
@@ -328,21 +335,9 @@ function getZigAgent(platform, options) {
  //   queue: "build-zig",
  // };

  return getEc2Agent(
    {
      os: "linux",
      arch: "x64",
      abi: "musl",
      distro: "alpine",
      release: "3.21",
    },
    options,
    {
      instanceType: "c7i.2xlarge",
      cpuCount: 4,
      threadsPerCore: 1,
    },
  );
  return getEc2Agent(getZigPlatform(), options, {
    instanceType: "r8g.large",
  });
}

/**
@@ -455,7 +450,7 @@ function getBuildCppStep(platform, options) {
      BUN_CPP_ONLY: "ON",
      ...getBuildEnv(platform, options),
    },
    // We used to build the C++ dependencies and bun in seperate steps.
    // We used to build the C++ dependencies and bun in separate steps.
    // However, as long as the zig build takes longer than both sequentially,
    // it's cheaper to run them in the same step. Can be revisited in the future.
    command: [`${command} --target bun`, `${command} --target dependencies`],
@@ -574,7 +569,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
    timeout_in_minutes: profile === "asan" ? 90 : 30,
    timeout_in_minutes: profile === "asan" ? 45 : 30,
    command:
      os === "windows"
        ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
@@ -927,7 +922,7 @@ function getOptionsStep() {
      {
        key: "unified-builds",
        select: "Do you want to build each platform in a single step?",
        hint: "If true, builds will not be split into seperate steps (this will likely slow down the build)",
        hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
        required: false,
        default: "false",
        options: booleanOptions,
@@ -935,7 +930,7 @@ function getOptionsStep() {
      {
        key: "unified-tests",
        select: "Do you want to run tests in a single step?",
        hint: "If true, tests will not be split into seperate steps (this will be very slow)",
        hint: "If true, tests will not be split into separate steps (this will be very slow)",
        required: false,
        default: "false",
        options: booleanOptions,
@@ -1113,6 +1108,11 @@ async function getPipeline(options = {}) {
    steps.push(
      ...relevantBuildPlatforms.map(target => {
        const imageKey = getImageKey(target);
        const zigImageKey = getImageKey(getZigPlatform());
        const dependsOn = imagePlatforms.has(zigImageKey) ? [`${zigImageKey}-build-image`] : [];
        if (imagePlatforms.has(imageKey)) {
          dependsOn.push(`${imageKey}-build-image`);
        }

        return getStepWithDependsOn(
          {
@@ -1122,7 +1122,7 @@ async function getPipeline(options = {}) {
              ? [getBuildBunStep(target, options)]
              : [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
          },
          imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
          ...dependsOn,
        );
      }),
    );
```
Hunk from the "How to build Bun" cursor rule (its file header was not captured):

````markdown
@@ -1,27 +1,13 @@
---
description: How to build Bun
globs:
globs:
alwaysApply: false
---

# How to build Bun

## CMake
Run:

Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.

* `CMakeLists.txt`
* `cmake/`
  * `Globals.cmake` - macros and functions used by all the other files
  * `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
  * `CompilerFlags.cmake` - compiler and linker flags used by all the targets
  * `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
  * `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)

## How to

There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:

```sh
bun run build          # builds a debug build: `build/debug/bun-debug`
bun run build:release  # builds a release build: `build/release/bun`
bun run build:assert   # builds a release build with debug assertions: `build/assert/bun`
```bash
bun bd
```
````
Hunk from the dev-server testing guide (its file header was not captured):

```
@@ -91,7 +91,7 @@ devTest("html file is watched", {

`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.

Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to recieve changes.
Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes.

When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; All other hard reloads automatically fail the test.
```
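The paragraphs above describe the dev-server test harness only in prose. A minimal sketch of what such a test might look like, assuming `devTest` takes a fixture object and an async callback that receives the `dev` handle described above; the exact signatures live in the harness (for example `client-fixture.mjs`), and everything beyond the names quoted in the text is an assumption:

```js
// Hypothetical sketch; only devTest, files, dev.fetch, and dev.write come from the text above.
devTest("index.html picks up edits", {
  files: {
    "index.html": "<h1>before</h1>",
  },
  async test(dev) {
    // Plain HTTP request against the running dev server.
    const before = await dev.fetch("/index.html");
    await before.text();

    // Use dev.write (not node:fs) so the harness waits for hot-reload
    // and for connected clients to receive the change.
    await dev.write("index.html", "<h1>after</h1>");

    const after = await dev.fetch("/index.html");
    await after.text();
  },
});
```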
.cursor/rules/registering-bun-modules.mdc (new file, 203 lines)

@@ -0,0 +1,203 @@
````markdown
# Registering Functions, Objects, and Modules in Bun

This guide documents the process of adding new functionality to the Bun global object and runtime.

## Overview

Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.

There are several key ways to expose functionality in Bun:

1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)

## The Registration Process

Adding new functionality to Bun involves several coordinated steps across multiple files:

### 1. Implement the Core Functionality in Zig

First, implement your feature in Zig, typically in its own directory in `src/`. Examples:

- `src/valkey/` for Redis/Valkey client
- `src/semver/` for SemVer functionality
- `src/smtp/` for SMTP client

### 2. Create JavaScript Bindings

Create bindings that expose your Zig functionality to JavaScript:

- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`

Example from a class definition file:

```typescript
// Example from a .classes.ts file
import { define } from "../../codegen/class-definitions";

export default [
  define({
    name: "YourFeature",
    construct: true,
    finalize: true,
    hasPendingActivity: true,
    memoryCost: true,
    klass: {},
    JSType: "0b11101110",
    proto: {
      yourMethod: {
        fn: "yourZigMethod",
        length: 1,
      },
      property: {
        getter: "getProperty",
      },
    },
    values: ["cachedValues"],
  }),
];
```

### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`

Add an entry to the `FOR_EACH_GETTER` macro:

```c
// In BunObject+exports.h
#define FOR_EACH_GETTER(macro) \
  macro(CSRF) \
  macro(CryptoHasher) \
  ... \
  macro(YourFeature) \
```

### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`

Implement a getter function in `BunObject.zig` that returns your feature:

```zig
// In BunObject.zig
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);

// In the exportAll() function:
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
```

### 5. Implement the Getter Function in a Relevant Zig File

Implement the function that creates your object:

```zig
// In your main module file (e.g., src/your_feature/your_feature.zig)
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
  return JSC.API.YourFeature.getConstructor(globalThis);
}
```

### 6. Add to Build System

Ensure your files are included in the build system by adding them to the appropriate targets.

## Example: Adding a New Module

Here's a comprehensive example of adding a hypothetical SMTP module:

1. Create implementation files in `src/smtp/`:

   - `index.zig`: Main entry point that exports everything
   - `SmtpClient.zig`: Core SMTP client implementation
   - `js_smtp.zig`: JavaScript bindings
   - `js_bindings.classes.ts`: Class definition

2. Define your JS class in `js_bindings.classes.ts`:

```typescript
import { define } from "../../codegen/class-definitions";

export default [
  define({
    name: "EmailClient",
    construct: true,
    finalize: true,
    hasPendingActivity: true,
    configurable: false,
    memoryCost: true,
    klass: {},
    JSType: "0b11101110",
    proto: {
      send: {
        fn: "send",
        length: 1,
      },
      verify: {
        fn: "verify",
        length: 0,
      },
      close: {
        fn: "close",
        length: 0,
      },
    },
    values: ["connectionPromise"],
  }),
];
```

3. Add getter to `BunObject+exports.h`:

```c
#define FOR_EACH_GETTER(macro) \
  macro(CSRF) \
  ... \
  macro(SMTP) \
```

4. Add getter function to `BunObject.zig`:

```zig
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);

// In exportAll:
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
```

5. Implement getter in your module:

```zig
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
  return JSC.API.JSEmailClient.getConstructor(globalThis);
}
```

## Best Practices

1. **Follow Naming Conventions**: Align your naming with existing patterns
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
3. **Memory Management**: Be careful with memory management and reference counting
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
6. **Testing**: Add tests for your new functionality

## Common Gotchas

- Be sure to handle reference counting properly with `ref()`/`deref()`
- Always implement proper cleanup in `deinit()` and `finalize()`
- For network operations, manage socket lifetimes correctly
- Use `JSC.Codegen` correctly to generate necessary binding code

## Related Files

- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings

## Additional Resources

For more detailed information on specific topics:

- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples
````
Hunk from the test-layout cursor rule (its file header was not captured):

```
@@ -11,10 +11,10 @@ You'll find all of Bun's tests in the `test/` directory.

* `test/`
  * `cli/` - CLI command tests, like `bun install` or `bun init`
  * `js/` - JavaScript & TypeScript tests
    * `bun/` - `Bun` APIs tests, seperated by category, for example: `glob/` for `Bun.Glob` tests
    * `node/` - Node.js module tests, seperated by module, for example: `assert/` for `node:assert` tests
    * `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
    * `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
    * `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
    * `web/` - Web API tests, seperated by category, for example: `fetch/` for `Request` and `Response` tests
    * `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
  * `third_party/` - npm package tests, to validate that basic usage works in Bun
  * `napi/` - N-API tests
  * `v8/` - V8 C++ API tests
```
.github/CODEOWNERS (vendored, 18 changed lines)

```
@@ -1,18 +1,18 @@
# Project
.github/CODEOWNERS @Jarred-Sumner
/.github/CODEOWNERS @Jarred-Sumner

# Build system
CMakeLists.txt @Electroid
cmake/ @Electroid
scripts/ @Electroid
/CMakeLists.txt @Electroid
/cmake/*.cmake @Electroid
/scripts/ @Electroid

# CI
.buildkite/ @Electroid
.github/workflows/ @Electroid
/.buildkite/ @Electroid
/.github/workflows/ @Electroid

# Debugger protocol
packages/bun-inspector-protocol/ @Electroid
packages/bun-debug-adapter-protocol/ @Electroid
/packages/bun-inspector-protocol/ @Electroid
/packages/bun-debug-adapter-protocol/ @Electroid

# Tests
test/expectations.txt @Jarred-Sumner
/test/expectations.txt @Jarred-Sumner
```
.github/workflows/claude.yml (vendored, new file, 35 lines)

@@ -0,0 +1,35 @@
```yaml
name: Claude Code

on:
  issue_comment:
    types: [created]
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  pull_request_review:
    types: [submitted]

jobs:
  claude:
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code
        id: claude
        uses: anthropics/claude-code-action@beta
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
```
.github/workflows/codex-test-sync.yml (vendored, new file, 58 lines)

@@ -0,0 +1,58 @@
```yaml
name: Codex Test Sync

on:
  pull_request:
    types: [labeled, opened]

env:
  BUN_VERSION: "1.2.15"

jobs:
  sync-node-tests:
    runs-on: ubuntu-latest
    if: |
      (github.event.action == 'labeled' && github.event.label.name == 'codex') ||
      (github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
      contains(github.head_ref, 'codex')
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          fetch-depth: 0

      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}

      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v44
        with:
          files: |
            test/js/node/test/parallel/**/*.{js,mjs,ts}
            test/js/node/test/sequential/**/*.{js,mjs,ts}

      - name: Sync tests
        if: steps.changed-files.outputs.any_changed == 'true'
        shell: bash
        run: |
          echo "Changed test files:"
          echo "${{ steps.changed-files.outputs.all_changed_files }}"

          # Process each changed test file
          for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
            # Extract test name from file path
            test_name=$(basename "$file" | sed 's/\.[^.]*$//')
            echo "Syncing test: $test_name"
            bun node:test:cp "$test_name"
          done

      - name: Commit changes
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "Sync Node.js tests with upstream"
```
.github/workflows/format.yml (vendored, 3 changed lines)

```yaml
@@ -44,7 +44,8 @@ jobs:
          version: 0.14.0
      - name: Zig Format
        run: |
          zig fmt src/**.zig
          bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
          zig fmt src
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
```
.github/workflows/glob-sources.yml (vendored, new file, 41 lines)

@@ -0,0 +1,41 @@
```yaml
name: Glob Sources

permissions:
  contents: write

on:
  workflow_call:
  workflow_dispatch:
  pull_request:

env:
  BUN_VERSION: "1.2.11"

jobs:
  glob-sources:
    name: Glob Sources
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Configure Git
        run: |
          git config --global core.autocrlf true
          git config --global core.ignorecase true
          git config --global core.precomposeUnicode true
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Setup Dependencies
        run: |
          bun install
      - name: Glob sources
        run: bun scripts/glob-sources.mjs
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "`bun scripts/glob-sources.mjs`"
```
.github/workflows/release.yml (vendored, 127 changed lines)

```yaml
@@ -48,6 +48,10 @@ on:
        description: "Should types be PR'd to DefinitelyTyped?"
        type: boolean
        default: false
      use-sfx:
        description: Should self-extracting archives be generated?
        type: boolean
        default: false

jobs:
  sign:
@@ -327,6 +331,129 @@ jobs:
      AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
      AWS_BUCKET: bun

  linux-sfx:
    name: Generate Linux SFX Archives
    runs-on: ubuntu-latest
    needs: sign
    if: ${{ github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && github.event.inputs.use-sfx == 'true') }}
    permissions:
      contents: write
    strategy:
      matrix:
        include:
          - arch: x64
            zip_name: bun-linux-x64
            display: x64
          - arch: x64
            zip_name: bun-linux-x64-baseline
            display: x64-baseline
            baseline: true
          - arch: aarch64
            zip_name: bun-linux-aarch64
            display: aarch64
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Environment
        run: |
          echo "Creating working directory..."
          mkdir -p sfx-build
      - name: Download Bun Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd sfx-build

          # Download the zip file from GitHub releases
          echo "Downloading ${{ matrix.zip_name }}.zip..."

          if [[ "${{ env.BUN_VERSION }}" == "canary" ]]; then
            DOWNLOAD_URL="https://github.com/oven-sh/bun/releases/download/canary/${{ matrix.zip_name }}.zip"
          else
            DOWNLOAD_URL="https://github.com/oven-sh/bun/releases/download/${{ env.BUN_VERSION }}/${{ matrix.zip_name }}.zip"
          fi

          curl -L -o "${{ matrix.zip_name }}.zip" "$DOWNLOAD_URL"

          # Extract the zip file
          unzip -q "${{ matrix.zip_name }}.zip"

          # Find the bun executable
          BUN_EXEC=$(find . -name "bun" -type f | head -n 1)
          if [[ -z "$BUN_EXEC" ]]; then
            echo "Error: Could not find bun executable in the archive"
            exit 1
          fi

          # Move it to a known location
          mv "$BUN_EXEC" "./bun"
          chmod +x ./bun

          # Clean up
          rm -rf bun-linux-*/ ${{ matrix.zip_name }}.zip
      - name: Create Self-Extracting Archive
        run: |
          cd sfx-build

          # Create the self-extracting script header
          cat > extract-header.sh << 'SCRIPT_EOF'
          #!/bin/sh
          set -e
          DEFAULT_INSTALL_DIR="$HOME/.bun/bin"
          INSTALL_DIR="${BUN_INSTALL_DIR:-$DEFAULT_INSTALL_DIR}"
          ARCH="$(uname -m)"
          EXPECTED_ARCH="ARCH_PLACEHOLDER"
          EXPECTED_DISPLAY="DISPLAY_PLACEHOLDER"
          case "$ARCH" in
          x86_64) ARCH_NORM="x64" ;;
          aarch64|arm64) ARCH_NORM="aarch64" ;;
          *) ARCH_NORM="$ARCH" ;;
          esac
          ARCH_MISMATCH=0
          case "$EXPECTED_ARCH" in
          x64) [ "$ARCH_NORM" != "x64" ] && ARCH_MISMATCH=1 ;;
          aarch64) [ "$ARCH_NORM" != "aarch64" ] && ARCH_MISMATCH=1 ;;
          esac
          if [ "$ARCH_MISMATCH" = "1" ]; then
          echo "Warning: This installer is for $EXPECTED_DISPLAY but you're running on $ARCH"
          fi
          mkdir -p "$INSTALL_DIR"
          ARCHIVE_LINE=$(awk '/^__ARCHIVE_BELOW__/ { print NR + 1; exit }' "$0")
          echo "Extracting Bun to $INSTALL_DIR..."
          tail -n +"$ARCHIVE_LINE" "$0" | base64 -d | tar -xzf - -C "$INSTALL_DIR"
          chmod +x "$INSTALL_DIR/bun"
          BUN_VERSION=$("$INSTALL_DIR/bun" --version 2>/dev/null || echo "unknown")
          echo "Bun $BUN_VERSION has been installed to $INSTALL_DIR/bun"
          echo "Run 'export PATH=\"\$PATH:$INSTALL_DIR\"' to add it to your PATH"
          exit 0
          __ARCHIVE_BELOW__
          SCRIPT_EOF

          # Replace placeholders
          sed -i "s/ARCH_PLACEHOLDER/${{ matrix.arch }}/g" extract-header.sh
          sed -i "s/DISPLAY_PLACEHOLDER/${{ matrix.display }}/g" extract-header.sh

          # Create the SFX archive
          tar -czf bun.tar.gz bun
          base64 < bun.tar.gz > bun.tar.gz.b64
          cat extract-header.sh bun.tar.gz.b64 > "${{ matrix.zip_name }}-sfx.sh"
          chmod +x "${{ matrix.zip_name }}-sfx.sh"

          # Generate checksum
          sha256sum "${{ matrix.zip_name }}-sfx.sh" > "${{ matrix.zip_name }}-sfx.sh.sha256"
      - name: Upload to Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd sfx-build

          # Upload the SFX to the release
          gh release upload "${{ env.BUN_VERSION }}" \
            "${{ matrix.zip_name }}-sfx.sh" \
            "${{ matrix.zip_name }}-sfx.sh.sha256" \
            --clobber \
            --repo "${{ github.repository }}"

  notify-sentry:
    name: Notify Sentry
    runs-on: ubuntu-latest
```
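The `extract-header.sh` trick above (a shell stub, an `__ARCHIVE_BELOW__` marker, then a base64-encoded tarball appended to the same file) is a generic pattern. Here is a minimal sketch of the same assembly step in Node/Bun-flavored JavaScript; the file names are hypothetical, and this is an illustration of the pattern rather than what the workflow runs.

```js
// Sketch only: assemble a self-extracting script the same way the workflow does.
// "header.sh" is assumed to end with a newline and a line containing only __ARCHIVE_BELOW__.
import { readFileSync, writeFileSync, chmodSync } from "node:fs";

const header = readFileSync("header.sh", "utf-8"); // shell stub that base64-decodes and untars itself
const payload = readFileSync("bun.tar.gz");        // gzipped tarball to embed

// Everything after the __ARCHIVE_BELOW__ line is treated as base64 data by the stub.
writeFileSync("bun-sfx.sh", header + payload.toString("base64") + "\n");
chmodSync("bun-sfx.sh", 0o755);
```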
.github/workflows/sfx-linux.yml (vendored, new file, 541 lines)

@@ -0,0 +1,541 @@
```yaml
name: Generate Linux SFX Archives
concurrency: sfx-linux

on:
  release:
    types:
      - published
  workflow_dispatch:
    inputs:
      tag:
        description: 'Release tag (e.g., "bun-v1.0.2", "canary")'
        required: true
        default: "canary"
      upload-to-release:
        description: "Upload SFX files to the release?"
        type: boolean
        default: false

env:
  BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}

jobs:
  create-linux-sfx:
    name: Create Linux Self-Extracting Archives
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'oven-sh' }}
    permissions:
      contents: write
    strategy:
      matrix:
        include:
          - arch: x64
            zip_name: bun-linux-x64
            display: x64
            gcc_arch: x86-64
          - arch: x64
            zip_name: bun-linux-x64-baseline
            display: x64-baseline
            baseline: true
            gcc_arch: x86-64
          - arch: aarch64
            zip_name: bun-linux-aarch64
            display: aarch64
            gcc_arch: aarch64
          - arch: x64
            zip_name: bun-linux-x64-musl
            display: x64-musl
            musl: true
            gcc_arch: x86-64
          - arch: x64
            zip_name: bun-linux-x64-musl-baseline
            display: x64-musl-baseline
            baseline: true
            musl: true
            gcc_arch: x86-64
          - arch: aarch64
            zip_name: bun-linux-aarch64-musl
            display: aarch64-musl
            musl: true
            gcc_arch: aarch64
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Build Environment
        run: |
          echo "Installing build dependencies..."
          sudo apt-get update
          sudo apt-get install -y gcc make xxd upx-ucl

          # Install cross-compilation toolchain for aarch64
          if [[ "${{ matrix.arch }}" == "aarch64" ]]; then
            sudo apt-get install -y gcc-aarch64-linux-gnu
          fi

          mkdir -p sfx-build
          cd sfx-build

      - name: Download Bun Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd sfx-build

          echo "Downloading ${{ matrix.zip_name }}.zip..."

          if [[ "${{ env.BUN_VERSION }}" == "canary" ]]; then
            DOWNLOAD_URL="https://github.com/oven-sh/bun/releases/download/canary/${{ matrix.zip_name }}.zip"
          else
            DOWNLOAD_URL="https://github.com/oven-sh/bun/releases/download/${{ env.BUN_VERSION }}/${{ matrix.zip_name }}.zip"
          fi

          curl -L -o "${{ matrix.zip_name }}.zip" "$DOWNLOAD_URL"

          # Extract the zip file
          unzip -q "${{ matrix.zip_name }}.zip"

          # Find the bun executable
          BUN_EXEC=$(find . -name "bun" -type f | head -n 1)
          if [[ -z "$BUN_EXEC" ]]; then
            echo "Error: Could not find bun executable in the archive"
            exit 1
          fi

          # Move it to a known location
          mv "$BUN_EXEC" "./bun"
          chmod +x ./bun

          # Compress the binary
          echo "Compressing bun binary..."
          gzip -9 -k ./bun

          # Clean up
          rm -rf bun-linux-*/ ${{ matrix.zip_name }}.zip

      - name: Create SFX Source Code
        run: |
          cd sfx-build

          # Create the self-extracting executable source
          cat > sfx.c << 'EOF'
          #include <stdio.h>
          #include <stdlib.h>
          #include <string.h>
          #include <unistd.h>
          #include <sys/stat.h>
          #include <errno.h>
          #include <fcntl.h>
          #include <zlib.h>

          // Terminal colors
          #define COLOR_RED "\033[0;31m"
          #define COLOR_GREEN "\033[0;32m"
          #define COLOR_YELLOW "\033[1;33m"
          #define COLOR_RESET "\033[0m"

          // Embedded compressed binary data
          static const unsigned char compressed_data[] = {
          #include "bun_data.h"
          };
          static const size_t compressed_size = sizeof(compressed_data);

          // Installation configuration
          #ifndef DEFAULT_INSTALL_DIR
          #define DEFAULT_INSTALL_DIR "/.bun/bin"
          #endif

          #ifndef BUN_ARCH
          #define BUN_ARCH "unknown"
          #endif

          void print_error(const char* msg) {
            if (isatty(STDERR_FILENO)) {
              fprintf(stderr, COLOR_RED "error: " COLOR_RESET "%s\n", msg);
            } else {
              fprintf(stderr, "error: %s\n", msg);
            }
          }

          void print_success(const char* msg) {
            if (isatty(STDOUT_FILENO)) {
              printf(COLOR_GREEN "%s" COLOR_RESET "\n", msg);
            } else {
              printf("%s\n", msg);
            }
          }

          void print_info(const char* msg) {
            printf("%s\n", msg);
          }

          int ensure_directory(const char* path) {
            struct stat st = {0};
            if (stat(path, &st) == -1) {
              if (mkdir(path, 0755) == -1 && errno != EEXIST) {
                return -1;
              }
            }
            return 0;
          }

          int create_directory_tree(const char* path) {
            char tmp[1024];
            char *p = NULL;
            size_t len;

            snprintf(tmp, sizeof(tmp), "%s", path);
            len = strlen(tmp);
            if (tmp[len - 1] == '/')
              tmp[len - 1] = 0;

            for (p = tmp + 1; *p; p++) {
              if (*p == '/') {
                *p = 0;
                if (ensure_directory(tmp) == -1) {
                  return -1;
                }
                *p = '/';
              }
            }

            if (ensure_directory(tmp) == -1) {
              return -1;
            }

            return 0;
          }

          int decompress_to_file(const char* output_path) {
            FILE* outfile = fopen(output_path, "wb");
            if (!outfile) {
              return -1;
            }

            // Decompress using zlib
            z_stream stream = {0};
            stream.next_in = (Bytef*)compressed_data;
            stream.avail_in = compressed_size;

            if (inflateInit2(&stream, 16 + MAX_WBITS) != Z_OK) {
              fclose(outfile);
              return -1;
            }

            unsigned char buffer[8192];
            int ret;

            do {
              stream.next_out = buffer;
              stream.avail_out = sizeof(buffer);

              ret = inflate(&stream, Z_NO_FLUSH);
              if (ret == Z_STREAM_ERROR) {
                inflateEnd(&stream);
                fclose(outfile);
                return -1;
              }

              size_t have = sizeof(buffer) - stream.avail_out;
              if (fwrite(buffer, 1, have, outfile) != have) {
                inflateEnd(&stream);
                fclose(outfile);
                return -1;
              }
            } while (ret != Z_STREAM_END);

            inflateEnd(&stream);
            fclose(outfile);

            // Make executable
            if (chmod(output_path, 0755) == -1) {
              return -1;
            }

            return 0;
          }

          int main(int argc, char* argv[]) {
            printf("Bun Self-Extracting Archive for Linux %s\n", BUN_ARCH);
            printf("=====================================\n\n");

            // Determine installation directory
            const char* install_dir_env = getenv("BUN_INSTALL_DIR");
            char install_dir[1024];

            if (install_dir_env && install_dir_env[0] != '\0') {
              snprintf(install_dir, sizeof(install_dir), "%s", install_dir_env);
            } else {
              const char* home = getenv("HOME");
              if (!home) {
                print_error("Could not determine home directory");
                return 1;
              }
              snprintf(install_dir, sizeof(install_dir), "%s%s", home, DEFAULT_INSTALL_DIR);
            }

            printf("Installation directory: %s\n", install_dir);

            // Create installation directory
            if (create_directory_tree(install_dir) == -1) {
              char error_msg[1024];
              snprintf(error_msg, sizeof(error_msg), "Failed to create directory: %s", install_dir);
              print_error(error_msg);
              return 1;
            }

            // Extract bun executable
            char bun_path[1024];
            snprintf(bun_path, sizeof(bun_path), "%s/bun", install_dir);

            printf("Extracting bun...\n");
            if (decompress_to_file(bun_path) == -1) {
              print_error("Failed to extract bun executable");
              return 1;
            }

            // Verify installation
            char version_cmd[1024];
            snprintf(version_cmd, sizeof(version_cmd), "%s --version 2>/dev/null", bun_path);
            FILE* fp = popen(version_cmd, "r");
            if (fp) {
              char version[128];
              if (fgets(version, sizeof(version), fp) != NULL) {
                // Remove newline
                size_t len = strlen(version);
                if (len > 0 && version[len-1] == '\n') {
                  version[len-1] = '\0';
                }
                char success_msg[256];
                snprintf(success_msg, sizeof(success_msg), "✓ Bun %s has been installed to %s", version, bun_path);
                print_success(success_msg);
              }
              pclose(fp);
            }

            // Check if in PATH
            const char* path_env = getenv("PATH");
            if (path_env && !strstr(path_env, install_dir)) {
              printf("\n");
              print_info("To get started, add Bun to your PATH:");
              printf("  export PATH=\"$PATH:%s\"\n", install_dir);
              printf("\n");
              print_info("To make this permanent, add it to your shell configuration file:");

              const char* shell = getenv("SHELL");
              if (shell) {
                if (strstr(shell, "bash")) {
                  printf("  echo 'export PATH=\"$PATH:%s\"' >> ~/.bashrc\n", install_dir);
                } else if (strstr(shell, "zsh")) {
                  printf("  echo 'export PATH=\"$PATH:%s\"' >> ~/.zshrc\n", install_dir);
                } else if (strstr(shell, "fish")) {
                  printf("  echo 'set -gx PATH $PATH %s' >> ~/.config/fish/config.fish\n", install_dir);
                } else {
                  printf("  echo 'export PATH=\"$PATH:%s\"' >> ~/.profile\n", install_dir);
                }
              }
            }

            return 0;
          }
          EOF

          # Convert the compressed binary to a C header file
          echo "Converting binary to C array..."
          xxd -i bun.gz | sed 's/unsigned char bun_gz\[\]//' | sed 's/unsigned int bun_gz_len = .*//' > bun_data.h

      - name: Build Self-Extracting Executable
        run: |
          cd sfx-build

          # Select compiler based on architecture and libc
          if [[ "${{ matrix.musl }}" == "true" ]]; then
            # For musl builds, we need musl-gcc
            sudo apt-get install -y musl-tools
            if [[ "${{ matrix.arch }}" == "aarch64" ]]; then
              # Cross-compile for aarch64 musl - this is more complex
              # For now, skip cross-compilation for musl
              echo "Warning: Cross-compilation for aarch64-musl not yet supported"
              exit 0
            else
              CC=musl-gcc
            fi
          elif [[ "${{ matrix.arch }}" == "aarch64" ]]; then
            CC=aarch64-linux-gnu-gcc
          else
            CC=gcc
          fi

          # Compile the self-extracting executable
          echo "Building self-extracting executable..."
          $CC -static -O3 -s \
            -DBUN_ARCH="\"${{ matrix.display }}\"" \
            sfx.c -o "${{ matrix.zip_name }}" \
            -lz

          # Compress the executable with UPX for smaller size
          echo "Compressing executable with UPX..."
          upx --best --lzma "${{ matrix.zip_name }}" || true

          # Make it executable
          chmod +x "${{ matrix.zip_name }}"

          # Show final size
          echo "Final executable size:"
          ls -lh "${{ matrix.zip_name }}"

      - name: Test Self-Extracting Archive
        if: ${{ matrix.arch != 'aarch64' && matrix.musl != 'true' }} # Can't test aarch64 or musl on x64 runner
        run: |
          cd sfx-build

          echo "Testing self-extraction..."
          TEST_DIR="$(mktemp -d)"
          BUN_INSTALL_DIR="$TEST_DIR" ./${{ matrix.zip_name }}

          if [ -x "$TEST_DIR/bun" ]; then
            echo "✓ Self-extraction test passed"
            "$TEST_DIR/bun" --version
            rm -rf "$TEST_DIR"
          else
            echo "✗ Self-extraction test failed"
            exit 1
          fi

      - name: Generate Checksums
        run: |
          cd sfx-build

          # Generate SHA256 checksums
          sha256sum "${{ matrix.zip_name }}" > "${{ matrix.zip_name }}.sha256"

          echo "Generated files:"
          ls -la ${{ matrix.zip_name }}*

      - name: Upload Artifacts
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.zip_name }}-sfx
          path: |
            sfx-build/${{ matrix.zip_name }}
            sfx-build/${{ matrix.zip_name }}.sha256

      - name: Upload to Release
        if: ${{ (github.event_name == 'release' || github.event.inputs.upload-to-release == 'true') && env.BUN_VERSION != 'canary' }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd sfx-build

          # Upload the SFX to the release
          gh release upload "${{ env.BUN_VERSION }}" \
            "${{ matrix.zip_name }}" \
            "${{ matrix.zip_name }}.sha256" \
            --clobber \
            --repo "${{ github.repository }}"

  create-universal-linux-installer:
    name: Create Universal Linux Installer
    runs-on: ubuntu-latest
    needs: create-linux-sfx
    if: ${{ github.repository_owner == 'oven-sh' }}
    permissions:
      contents: write
    steps:
      - name: Download SFX Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: bun-linux-*-sfx
          merge-multiple: true
          path: sfx-files

      - name: Create Universal Installer
        run: |
          cd sfx-files

          # Create a universal installer that detects architecture
          cat > bun-linux-install.sh << 'UNIVERSAL_EOF'
          #!/bin/sh
          # Universal Bun installer for Linux (detects architecture automatically)

          set -e

          VERSION="VERSION_PLACEHOLDER"

          # Detect architecture
          ARCH="$(uname -m)"
          case "$ARCH" in
            x86_64)
              # Check CPU features for baseline support
              if [ -f /proc/cpuinfo ]; then
                # Check for AVX2 support (non-baseline requirement)
                if grep -q "avx2" /proc/cpuinfo 2>/dev/null; then
                  ARCHIVE_NAME="bun-linux-x64"
                  echo "Detected: x64 (with AVX2 support)"
                else
                  ARCHIVE_NAME="bun-linux-x64-baseline"
                  echo "Detected: x64 (baseline - no AVX2)"
                fi
              else
                # Default to baseline if we can't detect
                ARCHIVE_NAME="bun-linux-x64-baseline"
                echo "Detected: x64 (defaulting to baseline)"
              fi
              ;;
            aarch64|arm64)
              ARCHIVE_NAME="bun-linux-aarch64"
              echo "Detected: ARM64/AArch64"
              ;;
            *)
              echo "Error: Unsupported architecture: $ARCH"
              echo "Bun supports x64 and aarch64 on Linux"
              exit 1
              ;;
          esac

          echo "Downloading Bun $VERSION for Linux..."

          if [ "$VERSION" = "canary" ]; then
            URL="https://github.com/oven-sh/bun/releases/download/canary/${ARCHIVE_NAME}-sfx.sh"
          else
            URL="https://github.com/oven-sh/bun/releases/download/$VERSION/${ARCHIVE_NAME}-sfx.sh"
          fi

          if command -v curl >/dev/null 2>&1; then
            curl -fsSL "$URL" | sh
          elif command -v wget >/dev/null 2>&1; then
            wget -qO- "$URL" | sh
          else
            echo "Error: Neither curl nor wget found. Please install one of them."
            exit 1
          fi
          UNIVERSAL_EOF

          # Replace version placeholder
          sed -i "s/VERSION_PLACEHOLDER/${{ env.BUN_VERSION }}/g" bun-linux-install.sh

          chmod +x bun-linux-install.sh

          # Generate checksum
          sha256sum bun-linux-install.sh > bun-linux-install.sh.sha256

      - name: Upload Universal Installer
        uses: actions/upload-artifact@v4
        with:
          name: bun-linux-universal-sfx
          path: |
            sfx-files/bun-linux-install.sh
            sfx-files/bun-linux-install.sh.sha256

      - name: Upload to Release
        if: ${{ (github.event_name == 'release' || github.event.inputs.upload-to-release == 'true') && env.BUN_VERSION != 'canary' }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd sfx-files

          gh release upload "${{ env.BUN_VERSION }}" \
            "bun-linux-install.sh" \
            "bun-linux-install.sh.sha256" \
            --clobber \
            --repo "${{ github.repository }}"
```
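The universal installer above picks `bun-linux-x64` versus `bun-linux-x64-baseline` by grepping `/proc/cpuinfo` for the AVX2 flag. A rough JavaScript equivalent of that selection logic follows; it is illustrative only, and it relies on the same assumptions the shell script makes (Linux, `/proc/cpuinfo` present, `avx2` listed among the CPU flags).

```js
// Sketch: choose the same archive name the universal installer would pick on Linux.
import { readFileSync, existsSync } from "node:fs";
import { arch } from "node:os";

function pickArchive() {
  if (arch() === "arm64") return "bun-linux-aarch64";
  // x64: the non-baseline build requires AVX2; fall back to baseline otherwise.
  if (existsSync("/proc/cpuinfo") && /\bavx2\b/.test(readFileSync("/proc/cpuinfo", "utf-8"))) {
    return "bun-linux-x64";
  }
  return "bun-linux-x64-baseline";
}

console.log(pickArchive());
```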
.github/workflows/update-cares.yml (vendored, 4 changed lines)

```yaml
@@ -50,12 +50,12 @@ jobs:
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
```
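These dependency-update workflows all resolve a release tag to a commit SHA in two hops: `git/refs/tags/<tag>` returns the SHA of the tag object, and `git/tags/<sha>` dereferences an annotated tag to the commit it points at. A small fetch-based sketch of the same lookup, with the repository and tag passed in as examples rather than anything the workflows require:

```js
// Sketch: resolve a tag to its commit SHA via the GitHub REST API (unauthenticated, rate-limited).
async function resolveTagCommit(repo, tag) {
  const api = `https://api.github.com/repos/${repo}/git`;
  const ref = await (await fetch(`${api}/refs/tags/${tag}`)).json();
  const sha = ref.object.sha;
  // Lightweight tags already point at a commit; annotated tags need one more dereference.
  if (ref.object.type === "commit") return sha;
  const tagObject = await (await fetch(`${api}/tags/${sha}`)).json();
  return tagObject.object.sha;
}

// Usage: console.log(await resolveTagCommit("c-ares/c-ares", someExistingTag));
```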
.github/workflows/update-libarchive.yml (vendored, 2 changed lines)

```yaml
@@ -50,7 +50,7 @@ jobs:
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
```
.github/workflows/update-libdeflate.yml (vendored, 2 changed lines)

```yaml
@@ -50,7 +50,7 @@ jobs:
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
```
.github/workflows/update-lolhtml.yml (vendored, 2 changed lines)

```yaml
@@ -50,7 +50,7 @@ jobs:
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
```
.github/workflows/update-lshpack.yml (vendored, 2 changed lines)

```yaml
@@ -50,7 +50,7 @@ jobs:
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
```
.github/workflows/update-zstd.yml (vendored, new file, 99 lines)

@@ -0,0 +1,99 @@
```yaml
name: Update zstd

on:
  schedule:
    - cron: "0 1 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check zstd version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildZstd.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildZstd.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildZstd.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/facebook/zstd/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildZstd.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildZstd.cmake
          commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-zstd-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates zstd to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/facebook/zstd/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-zstd.yml)
```
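The awk one-liner in the workflow above pulls the pinned hash from a `BuildZstd.cmake` layout where `COMMIT` and its value sit on consecutive lines. The same extraction in JavaScript, assuming that two-line layout; this is a sketch for illustration, not something the workflow runs:

```js
// Sketch: read the pinned commit from a CMake fragment shaped like
//   COMMIT
//     <40-char hex sha>
import { readFileSync } from "node:fs";

const cmake = readFileSync("cmake/targets/BuildZstd.cmake", "utf-8");
const match = cmake.match(/^\s*COMMIT\s*\n\s*([0-9a-f]{40})\s*$/m);
if (!match) throw new Error("Could not find COMMIT line in BuildZstd.cmake");
console.log(match[1]);
```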
.gitignore (vendored, 2 changed lines)

```
@@ -183,4 +183,4 @@ codegen-for-zig-team.tar.gz
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

*.bun-build
*.bun-build/bun/
```
.lldbinit (17 changed lines)

```
@@ -1,16 +1 @@
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR

command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std

command script import misctools/lldb/lldb_webkit.py

command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
command source -C -s true -e true misctools/lldb/init.lldb
```
338  .vscode/launch.json  generated  vendored
@@ -5,6 +5,9 @@
|
||||
// - FORCE_COLOR=1 forces colors in the terminal
|
||||
// - "${workspaceFolder}/test" is the cwd for `bun test` so it matches CI, we should fix this later
|
||||
// - "cppvsdbg" is used instead of "lldb" on Windows, because "lldb" is too slow
|
||||
// - Seeing WebKit files requires `vendor/WebKit` to exist and have code from the right commit.
|
||||
// Run `bun sync-webkit-source` to ensure that folder is at the right commit. If you haven't
|
||||
// cloned it at all, that script will suggest how.
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
// bun test [file]
|
||||
@@ -13,7 +16,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -21,14 +24,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --only",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--only", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "--only", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -36,20 +46,35 @@
|
||||
"BUN_DEBUG_jest": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"name": "Attach",
|
||||
"request": "attach",
|
||||
"pid": "${command:pickMyProcess}",
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -57,14 +82,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "0",
|
||||
@@ -72,14 +104,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --watch",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--watch", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "--watch", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -87,14 +126,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --hot",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--hot", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "--hot", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -102,14 +148,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -118,7 +171,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/?wait=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -130,7 +190,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -139,7 +199,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/?break=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -160,7 +227,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -178,7 +252,14 @@
|
||||
"GOMAXPROCS": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -192,7 +273,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -202,14 +290,18 @@
|
||||
"args": ["run", "--watch", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
// "BUN_DEBUG_DEBUGGER": "1",
|
||||
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -223,7 +315,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -239,7 +338,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/?wait=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -260,7 +366,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/?break=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -273,7 +386,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [...]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -281,14 +394,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [...] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -296,14 +416,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [...] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -311,14 +438,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [...] --watch",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--watch", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "--watch", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -326,14 +460,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [...] --hot",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--hot", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "--hot", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -341,14 +482,21 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [...] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -357,7 +505,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/?wait=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -369,7 +524,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [...] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -378,7 +533,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/?break=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -398,7 +560,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
// bun test [*]
|
||||
{
|
||||
@@ -413,7 +582,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -427,7 +603,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -442,7 +625,14 @@
|
||||
"BUN_INSPECT": "ws://localhost:0/",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
"serverReadyAction": {
|
||||
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
|
||||
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
|
||||
@@ -461,7 +651,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -475,7 +672,14 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
"sourceMap": {
|
||||
// macOS
|
||||
"/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// linux
|
||||
"/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit",
|
||||
"/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
},
|
||||
},
|
||||
// Windows: bun test [file]
|
||||
{
|
||||
@@ -486,7 +690,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -511,7 +715,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test --only [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "--only", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "--only", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -536,7 +740,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -561,7 +765,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -586,7 +790,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -620,7 +824,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"args": ["test", "--timeout=3600000", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -787,7 +991,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -812,7 +1016,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -837,7 +1041,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -862,7 +1066,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --watch",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "--watch", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "--watch", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -887,7 +1091,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --hot",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "--hot", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "--hot", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -912,7 +1116,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -946,7 +1150,7 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"args": ["test", "--timeout=3600000", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
@@ -1113,6 +1317,17 @@
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "bun",
|
||||
"name": "[JS] bun run [file]",
|
||||
"runtime": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"runtimeArgs": ["run", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "midas-rr",
|
||||
"request": "attach",
|
||||
@@ -1122,6 +1337,11 @@
|
||||
"handle SIGPWR nostop noprint pass",
|
||||
"source ${workspaceFolder}/misctools/gdb/std_gdb_pretty_printers.py",
|
||||
"source ${workspaceFolder}/misctools/gdb/zig_gdb_pretty_printers.py",
|
||||
"set substitute-path /webkitbuild/vendor/WebKit ${workspaceFolder}/vendor/WebKit",
|
||||
"set substitute-path /webkitbuild/.WTF/Headers ${workspaceFolder}/vendor/WebKit/Source/WTF",
|
||||
// uncomment if you like
|
||||
// "set disassembly-flavor intel",
|
||||
"set print asm-demangle",
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
36  AGENTS.md  Normal file
@@ -0,0 +1,36 @@
## bun tests

**IMPORTANT**: use the `bun bd` command instead of the `bun` command. For example:

✅ Good

```sh
bun bd test internal/ban-words.test.ts
bun bd ./foo.ts
```

The `bun bd` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected.

### Run a file

To run a file, you can use the `bun bd <file-path>` command.

```sh
bun bd ./foo.ts
```

### Run tests

To run a single test, you need to use the `bun bd test <test-name>` command.

```sh
bun bd test internal/ban-words.test.ts
```

You must ALWAYS make sure to pass a file path to the `bun bd test <file-path>` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory.

### Run a Node.js test

```sh
bun bd --silent node:test test-fs-link
```
2  Makefile
@@ -482,7 +482,7 @@ STATIC_MUSL_FLAG ?=
WRAP_SYMBOLS_ON_LINUX =

ifeq ($(OS_NAME), linux)
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=log -Wl,--wrap=log2 \
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=exp2 -Wl,--wrap=log -Wl,--wrap=log2 \
	-Wl,--wrap=lstat \
	-Wl,--wrap=stat \
	-Wl,--wrap=fstat \
@@ -47,6 +47,8 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).

> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.

> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.sh/docs/installation#cpu-requirements-and-baseline-builds)

```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash
@@ -50,6 +50,10 @@ bench("murmur64v2 (short)", () => {
  Bun.hash.murmur64v2(shortStr);
});

bench("rapidhash (short)", () => {
  Bun.hash.rapidhash(shortStr);
});

bench("wyhash (128 KB)", () => {
  Bun.hash.wyhash(longStr);
});
@@ -94,4 +98,8 @@ bench("murmur64v2 (128 KB)", () => {
  Bun.hash.murmur64v2(longStr);
});

bench("rapidhash (128 KB)", () => {
  Bun.hash.rapidhash(longStr);
});

run();
1  bun  Submodule
Submodule bun added at f62940bbda
@@ -88,7 +88,7 @@ endif()
if(UNIX)
  register_compiler_flags(
    DESCRIPTION "Enable debug symbols"
    -g3 ${DEBUG}
    -g3 -gz=zstd ${DEBUG}
    -g1 ${RELEASE}
  )
@@ -44,10 +44,12 @@
      "src/bun.js/bindings/webcrypto/*/*.cpp",
      "src/bun.js/bindings/node/*.cpp",
      "src/bun.js/bindings/node/crypto/*.cpp",
      "src/bun.js/bindings/node/http/*.cpp",
      "src/bun.js/bindings/v8/*.cpp",
      "src/bun.js/bindings/v8/shim/*.cpp",
      "src/bake/*.cpp",
      "src/deps/*.cpp",
      "src/vm/*.cpp",
      "packages/bun-usockets/src/crypto/*.cpp"
    ]
  },
@@ -59,7 +61,9 @@
      "packages/bun-usockets/src/internal/*.c",
      "packages/bun-usockets/src/crypto/*.c",
      "src/bun.js/bindings/uv-posix-polyfills.c",
      "src/bun.js/bindings/uv-posix-stubs.c"
      "src/bun.js/bindings/uv-posix-stubs.c",
      "src/*.c",
      "src/bun.js/bindings/node/http/llhttp/*.c"
    ]
  }
]
@@ -20,4 +20,4 @@ src/bake/hmr-runtime-client.ts
src/bake/hmr-runtime-error.ts
src/bake/hmr-runtime-server.ts
src/bake/server/stack-trace-stub.ts
src/bake/shared.ts
src/bake/shared.ts
@@ -4,4 +4,4 @@ src/bun.js/api/BunObject.bind.ts
src/bun.js/bindgen_test.bind.ts
src/bun.js/bindings/NodeModuleModule.bind.ts
src/bun.js/node/node_os.bind.ts
src/fmt.bind.ts
src/fmt.bind.ts
@@ -9,4 +9,4 @@ packages/bun-error/package.json
packages/bun-error/runtime-error.ts
packages/bun-error/sourcemap.ts
packages/bun-error/stack-trace-parser.ts
packages/bun-error/tsconfig.json
packages/bun-error/tsconfig.json
@@ -7,6 +7,9 @@ packages/bun-usockets/src/loop.c
packages/bun-usockets/src/quic.c
packages/bun-usockets/src/socket.c
packages/bun-usockets/src/udp.c
src/asan-config.c
src/bun.js/bindings/node/http/llhttp/api.c
src/bun.js/bindings/node/http/llhttp/http.c
src/bun.js/bindings/node/http/llhttp/llhttp.c
src/bun.js/bindings/uv-posix-polyfills.c
src/bun.js/bindings/uv-posix-stubs.c
src/asan-config.c
@@ -80,6 +80,9 @@ src/bun.js/bindings/JSEnvironmentVariableMap.cpp
src/bun.js/bindings/JSFFIFunction.cpp
src/bun.js/bindings/JSMockFunction.cpp
src/bun.js/bindings/JSNextTickQueue.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
src/bun.js/bindings/JSPropertyIterator.cpp
src/bun.js/bindings/JSS3File.cpp
src/bun.js/bindings/JSSocketAddressDTO.cpp
@@ -141,6 +144,13 @@ src/bun.js/bindings/node/crypto/JSSign.cpp
src/bun.js/bindings/node/crypto/JSVerify.cpp
src/bun.js/bindings/node/crypto/KeyObject.cpp
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
src/bun.js/bindings/node/http/JSConnectionsList.cpp
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
src/bun.js/bindings/node/http/JSHTTPParser.cpp
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
src/bun.js/bindings/node/NodeTimers.cpp
src/bun.js/bindings/NodeAsyncHooks.cpp
src/bun.js/bindings/NodeDirent.cpp
@@ -153,6 +163,10 @@ src/bun.js/bindings/NodeTLS.cpp
src/bun.js/bindings/NodeURL.cpp
src/bun.js/bindings/NodeValidator.cpp
src/bun.js/bindings/NodeVM.cpp
src/bun.js/bindings/NodeVMModule.cpp
src/bun.js/bindings/NodeVMScript.cpp
src/bun.js/bindings/NodeVMSourceTextModule.cpp
src/bun.js/bindings/NodeVMSyntheticModule.cpp
src/bun.js/bindings/NoOpForTesting.cpp
src/bun.js/bindings/ObjectBindings.cpp
src/bun.js/bindings/objects.cpp
@@ -161,6 +175,7 @@ src/bun.js/bindings/Path.cpp
src/bun.js/bindings/ProcessBindingBuffer.cpp
src/bun.js/bindings/ProcessBindingConstants.cpp
src/bun.js/bindings/ProcessBindingFs.cpp
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
src/bun.js/bindings/ProcessBindingNatives.cpp
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
src/bun.js/bindings/ProcessBindingUV.cpp
@@ -409,6 +424,7 @@ src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
src/bun.js/bindings/webcrypto/CryptoKey.cpp
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
@@ -449,6 +465,7 @@ src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
@@ -463,4 +480,6 @@ src/bun.js/modules/NodeTTYModule.cpp
src/bun.js/modules/NodeUtilTypesModule.cpp
src/bun.js/modules/ObjectModule.cpp
src/deps/libuwsockets.cpp
src/io/io_darwin.cpp
src/io/io_darwin.cpp
src/vm/Semaphore.cpp
src/vm/SigintWatcher.cpp
@@ -15,4 +15,4 @@ src/codegen/generate-jssink.ts
src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/replacements.ts
@@ -52,6 +52,7 @@ src/js/internal/debugger.ts
src/js/internal/errors.ts
src/js/internal/fifo.ts
src/js/internal/fixed_queue.ts
src/js/internal/freelist.ts
src/js/internal/fs/cp-sync.ts
src/js/internal/fs/cp.ts
src/js/internal/fs/glob.ts
@@ -160,4 +161,4 @@ src/js/thirdparty/node-fetch.ts
src/js/thirdparty/undici.js
src/js/thirdparty/vercel_fetch.js
src/js/thirdparty/ws.js
src/js/wasi-runner.js
src/js/wasi-runner.js
@@ -17,7 +17,8 @@ src/node-fallbacks/stream.js
src/node-fallbacks/string_decoder.js
src/node-fallbacks/sys.js
src/node-fallbacks/timers.js
src/node-fallbacks/timers.promises.js
src/node-fallbacks/tty.js
src/node-fallbacks/url.js
src/node-fallbacks/util.js
src/node-fallbacks/zlib.js
src/node-fallbacks/zlib.js
@@ -20,4 +20,4 @@ src/bun.js/node/node.classes.ts
src/bun.js/resolve_message.classes.ts
src/bun.js/test/jest.classes.ts
src/bun.js/webcore/encoding.classes.ts
src/bun.js/webcore/response.classes.ts
src/bun.js/webcore/response.classes.ts
@@ -55,11 +55,17 @@ src/bun.js/api/html_rewriter.zig
src/bun.js/api/JSBundler.zig
src/bun.js/api/JSTranspiler.zig
src/bun.js/api/server.zig
src/bun.js/api/server/AnyRequestContext.zig
src/bun.js/api/server/HTMLBundle.zig
src/bun.js/api/server/HTTPStatusText.zig
src/bun.js/api/server/InspectorBunFrontendDevServerAgent.zig
src/bun.js/api/server/NodeHTTPResponse.zig
src/bun.js/api/server/RequestContext.zig
src/bun.js/api/server/ServerConfig.zig
src/bun.js/api/server/ServerWebSocket.zig
src/bun.js/api/server/SSLConfig.zig
src/bun.js/api/server/StaticRoute.zig
src/bun.js/api/server/WebSocketServerContext.zig
src/bun.js/api/streams.classes.zig
src/bun.js/api/Timer.zig
src/bun.js/api/TOMLObject.zig
@@ -86,8 +92,6 @@ src/bun.js/bindings/Exception.zig
src/bun.js/bindings/FetchHeaders.zig
src/bun.js/bindings/FFI.zig
src/bun.js/bindings/generated_classes_list.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GetterSetter.zig
src/bun.js/bindings/HTTPServerAgent.zig
src/bun.js/bindings/JSArray.zig
@@ -107,6 +111,7 @@ src/bun.js/bindings/JSPropertyIterator.zig
src/bun.js/bindings/JSRef.zig
src/bun.js/bindings/JSRuntimeType.zig
src/bun.js/bindings/JSString.zig
src/bun.js/bindings/JSType.zig
src/bun.js/bindings/JSUint8Array.zig
src/bun.js/bindings/JSValue.zig
src/bun.js/bindings/NodeModuleModule.zig
@@ -135,6 +140,21 @@ src/bun.js/ConsoleObject.zig
src/bun.js/Counters.zig
src/bun.js/Debugger.zig
src/bun.js/event_loop.zig
src/bun.js/event_loop/AnyEventLoop.zig
src/bun.js/event_loop/AnyTask.zig
src/bun.js/event_loop/AnyTaskWithExtraContext.zig
src/bun.js/event_loop/ConcurrentPromiseTask.zig
src/bun.js/event_loop/ConcurrentTask.zig
src/bun.js/event_loop/CppTask.zig
src/bun.js/event_loop/DeferredTaskQueue.zig
src/bun.js/event_loop/EventLoopHandle.zig
src/bun.js/event_loop/GarbageCollectionController.zig
src/bun.js/event_loop/JSCScheduler.zig
src/bun.js/event_loop/ManagedTask.zig
src/bun.js/event_loop/MiniEventLoop.zig
src/bun.js/event_loop/PosixSignalHandle.zig
src/bun.js/event_loop/Task.zig
src/bun.js/event_loop/WorkTask.zig
src/bun.js/hot_reloader.zig
src/bun.js/ipc.zig
src/bun.js/javascript_core_c_api.zig
@@ -222,6 +242,7 @@ src/bun.js/webcore/Response.zig
src/bun.js/webcore/S3Client.zig
src/bun.js/webcore/S3File.zig
src/bun.js/webcore/S3Stat.zig
src/bun.js/webcore/ScriptExecutionContext.zig
src/bun.js/webcore/Sink.zig
src/bun.js/webcore/streams.zig
src/bun.js/webcore/TextDecoder.zig
@@ -237,6 +258,7 @@ src/ci_info.zig
src/cli.zig
src/cli/add_command.zig
src/cli/add_completions.zig
src/cli/audit_command.zig
src/cli/build_command.zig
src/cli/bunx_command.zig
src/cli/colon_list_type.zig
@@ -256,6 +278,7 @@ src/cli/package_manager_command.zig
src/cli/patch_command.zig
src/cli/patch_commit_command.zig
src/cli/pm_trusted_command.zig
src/cli/pm_view_command.zig
src/cli/publish_command.zig
src/cli/remove_command.zig
src/cli/run_command.zig
@@ -383,7 +406,22 @@ src/deps/picohttp.zig
src/deps/picohttpparser.zig
src/deps/tcc.zig
src/deps/uws.zig
src/deps/uws/App.zig
src/deps/uws/BodyReaderMixin.zig
src/deps/uws/ConnectingSocket.zig
src/deps/uws/InternalLoopData.zig
src/deps/uws/ListenSocket.zig
src/deps/uws/Loop.zig
src/deps/uws/Request.zig
src/deps/uws/Response.zig
src/deps/uws/socket.zig
src/deps/uws/SocketContext.zig
src/deps/uws/Timer.zig
src/deps/uws/udp.zig
src/deps/uws/UpgradedDuplex.zig
src/deps/uws/us_socket_t.zig
src/deps/uws/WebSocket.zig
src/deps/uws/WindowsNamedPipe.zig
src/deps/zig-clap/clap.zig
src/deps/zig-clap/clap/args.zig
src/deps/zig-clap/clap/comptime.zig
@@ -432,15 +470,30 @@ src/identity_context.zig
src/import_record.zig
src/ini.zig
src/install/bin.zig
src/install/bun.lock.zig
src/install/dependency.zig
src/install/extract_tarball.zig
src/install/install.zig
src/install/integrity.zig
src/install/lifecycle_script_runner.zig
src/install/lockfile.zig
src/install/lockfile/Buffers.zig
src/install/lockfile/bun.lock.zig
src/install/lockfile/bun.lockb.zig
src/install/lockfile/CatalogMap.zig
src/install/lockfile/lockfile_json_stringify_for_debugging.zig
src/install/lockfile/OverrideMap.zig
src/install/lockfile/Package.zig
src/install/lockfile/Package/Meta.zig
src/install/lockfile/Package/Scripts.zig
src/install/lockfile/Package/WorkspaceMap.zig
src/install/lockfile/printer/tree_printer.zig
src/install/lockfile/printer/Yarn.zig
src/install/lockfile/Tree.zig
src/install/migration.zig
src/install/npm.zig
src/install/PackageManager/CommandLineArguments.zig
src/install/PackageManager/PackageJSONEditor.zig
src/install/PackageManager/PackageManagerOptions.zig
src/install/padding_checker.zig
src/install/patch_install.zig
src/install/repository.zig
@@ -456,7 +509,6 @@ src/io/PipeReader.zig
src/io/pipes.zig
src/io/PipeWriter.zig
src/io/source.zig
src/io/time.zig
src/js_ast.zig
src/js_lexer_tables.zig
src/js_lexer.zig
@@ -607,4 +659,4 @@ src/windows.zig
src/work_pool.zig
src/workaround_missing_symbols.zig
src/wyhash.zig
src/zlib.zig
src/zlib.zig
@@ -46,7 +46,7 @@ endif()

set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error)

absolute_sources(BUN_ERROR_SOURCES ${CWD}/cmake/BunErrorSources.txt)
absolute_sources(BUN_ERROR_SOURCES ${CWD}/cmake/sources/BunErrorSources.txt)

set(BUN_ERROR_OUTPUT ${CODEGEN_PATH}/bun-error)
set(BUN_ERROR_OUTPUTS
@@ -135,7 +135,7 @@ register_command(

set(BUN_NODE_FALLBACKS_SOURCE ${CWD}/src/node-fallbacks)

absolute_sources(BUN_NODE_FALLBACKS_SOURCES ${CWD}/cmake/NodeFallbacksSources.txt)
absolute_sources(BUN_NODE_FALLBACKS_SOURCES ${CWD}/cmake/sources/NodeFallbacksSources.txt)

set(BUN_NODE_FALLBACKS_OUTPUT ${CODEGEN_PATH}/node-fallbacks)
set(BUN_NODE_FALLBACKS_OUTPUTS)
@@ -161,14 +161,9 @@ register_command(
  CWD
    ${BUN_NODE_FALLBACKS_SOURCE}
  COMMAND
    ${BUN_EXECUTABLE} x
      esbuild ${ESBUILD_ARGS}
    ${BUN_EXECUTABLE} run build-fallbacks
      ${BUN_NODE_FALLBACKS_OUTPUT}
      ${BUN_NODE_FALLBACKS_SOURCES}
      --outdir=${BUN_NODE_FALLBACKS_OUTPUT}
      --format=esm
      --minify
      --bundle
      --platform=browser
  SOURCES
    ${BUN_NODE_FALLBACKS_SOURCES}
    ${BUN_NODE_FALLBACKS_NODE_MODULES}
@@ -235,7 +230,7 @@ register_command(

set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)

absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/ZigGeneratedClassesSources.txt)
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)

set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
  ${CODEGEN_PATH}/ZigGeneratedClasses.h
@@ -268,8 +263,8 @@ register_command(

set(BUN_JAVASCRIPT_CODEGEN_SCRIPT ${CWD}/src/codegen/bundle-modules.ts)

absolute_sources(BUN_JAVASCRIPT_SOURCES ${CWD}/cmake/JavaScriptSources.txt)
absolute_sources(BUN_JAVASCRIPT_CODEGEN_SOURCES ${CWD}/cmake/JavaScriptCodegenSources.txt)
absolute_sources(BUN_JAVASCRIPT_SOURCES ${CWD}/cmake/sources/JavaScriptSources.txt)
absolute_sources(BUN_JAVASCRIPT_CODEGEN_SOURCES ${CWD}/cmake/sources/JavaScriptCodegenSources.txt)

list(APPEND BUN_JAVASCRIPT_CODEGEN_SOURCES
  ${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -311,7 +306,7 @@ register_command(

set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)

absolute_sources(BUN_BAKE_RUNTIME_SOURCES ${CWD}/cmake/BakeRuntimeSources.txt)
absolute_sources(BUN_BAKE_RUNTIME_SOURCES ${CWD}/cmake/sources/BakeRuntimeSources.txt)

list(APPEND BUN_BAKE_RUNTIME_CODEGEN_SOURCES
  ${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -344,7 +339,7 @@ register_command(

set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)

absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/BindgenSources.txt)
absolute_sources(BUN_BINDGEN_SOURCES ${CWD}/cmake/sources/BindgenSources.txt)

set(BUN_BINDGEN_CPP_OUTPUTS
  ${CODEGEN_PATH}/GeneratedBindings.cpp
@@ -413,6 +408,7 @@ set(BUN_OBJECT_LUT_SOURCES
  ${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
  ${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
  ${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
  ${CWD}/src/bun.js/bindings/ProcessBindingHTTPParser.cpp
  ${CWD}/src/bun.js/modules/NodeModuleModule.cpp
  ${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
)
@@ -426,6 +422,7 @@ set(BUN_OBJECT_LUT_OUTPUTS
  ${CODEGEN_PATH}/ProcessBindingConstants.lut.h
  ${CODEGEN_PATH}/ProcessBindingFs.lut.h
  ${CODEGEN_PATH}/ProcessBindingNatives.lut.h
  ${CODEGEN_PATH}/ProcessBindingHTTPParser.lut.h
  ${CODEGEN_PATH}/NodeModuleModule.lut.h
  ${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
)
@@ -501,7 +498,7 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(

# --- Zig ---

absolute_sources(BUN_ZIG_SOURCES ${CWD}/cmake/ZigSources.txt)
absolute_sources(BUN_ZIG_SOURCES ${CWD}/cmake/sources/ZigSources.txt)

list(APPEND BUN_ZIG_SOURCES
  ${CWD}/build.zig
@@ -598,8 +595,8 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)

# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/CSources.txt)
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)

if(WIN32)
  list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
@@ -737,6 +734,7 @@ target_include_directories(${bun} PRIVATE
  ${CWD}/src/bun.js/bindings/webcore
  ${CWD}/src/bun.js/bindings/webcrypto
  ${CWD}/src/bun.js/bindings/node/crypto
  ${CWD}/src/bun.js/bindings/node/http
  ${CWD}/src/bun.js/bindings/sqlite
  ${CWD}/src/bun.js/bindings/v8
  ${CWD}/src/bun.js/modules
@@ -749,7 +747,7 @@ target_include_directories(${bun} PRIVATE
  ${NODEJS_HEADERS_PATH}/include
)

if(NOT WIN32)
if(NOT WIN32)
  target_include_directories(${bun} PRIVATE ${CWD}/src/bun.js/bindings/libuv)
endif()

@@ -882,7 +880,7 @@ if(NOT WIN32)
    -Wno-nullability-completeness
    -Werror
  )

if(ENABLE_ASAN)
  target_compile_options(${bun} PUBLIC
    -fsanitize=address
@@ -940,6 +938,7 @@ if(LINUX)
  if(NOT ABI STREQUAL "musl")
    target_link_options(${bun} PUBLIC
      -Wl,--wrap=exp
      -Wl,--wrap=exp2
      -Wl,--wrap=expf
      -Wl,--wrap=fcntl64
      -Wl,--wrap=log
@@ -1019,6 +1018,7 @@ if(WIN32)
  target_link_libraries(${bun} PRIVATE
    ${WEBKIT_LIB_PATH}/WTF.lib
    ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
    ${WEBKIT_LIB_PATH}/bmalloc.lib
    ${WEBKIT_LIB_PATH}/sicudtd.lib
    ${WEBKIT_LIB_PATH}/sicuind.lib
    ${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1027,6 +1027,7 @@ if(WIN32)
  target_link_libraries(${bun} PRIVATE
    ${WEBKIT_LIB_PATH}/WTF.lib
    ${WEBKIT_LIB_PATH}/JavaScriptCore.lib
    ${WEBKIT_LIB_PATH}/bmalloc.lib
    ${WEBKIT_LIB_PATH}/sicudt.lib
    ${WEBKIT_LIB_PATH}/sicuin.lib
    ${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -1062,6 +1063,7 @@ set(BUN_DEPENDENCIES
  TinyCC
  Zlib
  LibArchive # must be loaded after zlib
  HdrHistogram # must be loaded after zlib
  Zstd
)
@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    c-ares/c-ares
  COMMIT
    4f4912bce7374f787b10576851b687935f018e17
    d3a507e920e7af18a5efb7f9f1d8044ed4750013
)

register_cmake_command(
24  cmake/targets/BuildHdrHistogram.cmake  Normal file
@@ -0,0 +1,24 @@
register_repository(
  NAME
    hdrhistogram
  REPOSITORY
    HdrHistogram/HdrHistogram_c
  COMMIT
    652d51bcc36744fd1a6debfeb1a8a5f58b14022c
)

register_cmake_command(
  TARGET
    hdrhistogram
  LIBRARIES
    hdr_histogram_static
  INCLUDES
    include
  LIB_PATH
    src
  ARGS
    -DHDR_HISTOGRAM_BUILD_SHARED=OFF
    -DHDR_HISTOGRAM_BUILD_STATIC=ON
    -DHDR_LOG_REQUIRED=DISABLED
    -DHDR_HISTOGRAM_BUILD_PROGRAMS=OFF
)
@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    ebiggers/libdeflate
  COMMIT
    78051988f96dc8d8916310d8b24021f01bd9e102
    96836d7d9d10e3e0d53e6edb54eb908514e336c4
)

register_cmake_command(

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    facebook/zstd
  COMMIT
    794ea1b0afca0f020f4e57b6732332231fb23c70
    f8745da6ff1ad1e7bab384bd1f9d742439278e99
)

register_cmake_command(
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION eda8b0fb4fb1aa23db9c2b00933df8b58bcdd289)
  set(WEBKIT_VERSION 85b01f72bb53299e75bd0889ee67431a84c7bdb6)
endif()

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
@@ -41,14 +41,6 @@ if(WEBKIT_LOCAL)
  return()
endif()

if(EXISTS ${WEBKIT_PATH}/package.json)
  file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)

  if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
    return()
  endif()
endif()

if(WIN32)
  set(WEBKIT_OS "windows")
elseif(APPLE)
@@ -86,10 +78,18 @@ if(ENABLE_ASAN)
  set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-asan")
endif()

set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})

if(EXISTS ${WEBKIT_PATH}/package.json)
  file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)

  if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
    return()
  endif()
endif()

file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS)
file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH)
file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME})
@@ -572,7 +572,7 @@ _bun_outdated_completion() {
|
||||
'--no-progress[Disable the progress bar]' \
|
||||
'--help[Print this help menu]' &&
|
||||
ret=0
|
||||
|
||||
|
||||
case $state in
|
||||
config)
|
||||
_bun_list_bunfig_toml
|
||||
|
||||
@@ -175,6 +175,7 @@ Bun.hash.xxHash3("data", 1234);
|
||||
Bun.hash.murmur32v3("data", 1234);
|
||||
Bun.hash.murmur32v2("data", 1234);
|
||||
Bun.hash.murmur64v2("data", 1234);
|
||||
Bun.hash.rapidhash("data", 1234);
|
||||
```
|
||||
|
||||
## `Bun.CryptoHasher`
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
See the [`bun test`](https://bun.sh/docs/cli/test) documentation.
|
||||
@@ -206,6 +206,38 @@ Each call to `console.log` or `console.error` will be broadcast to the terminal
|
||||
|
||||
Internally, this reuses the existing WebSocket connection from hot module reloading to send the logs.
|
||||
|
||||
### Edit files in the browser
|
||||
|
||||
Bun's frontend dev server has support for [Automatic Workspace Folders](https://chromium.googlesource.com/devtools/devtools-frontend/+/main/docs/ecosystem/automatic_workspace_folders.md) in Chrome DevTools, which lets you save edits to files in the browser.
|
||||
|
||||
{% image src="/images/bun-chromedevtools.gif" alt="Bun's frontend dev server has support for Automatic Workspace Folders in Chrome DevTools, which lets you save edits to files in the browser." /%}
|
||||
|
||||
{% details summary="How it works" %}
|
||||
|
||||
Bun's dev server automatically adds a `/.well-known/appspecific/com.chrome.devtools.json` route to the server.
|
||||
|
||||
This route returns a JSON object with the following shape:
|
||||
|
||||
```json
|
||||
{
|
||||
"workspace": {
|
||||
"root": "/path/to/your/project",
|
||||
"uuid": "a-unique-identifier-for-this-workspace"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For security reasons, this is only enabled when:
|
||||
|
||||
1. The request is coming from localhost, 127.0.0.1, or ::1.
|
||||
2. Hot Module Reloading is enabled.
|
||||
3. The `chromeDevToolsAutomaticWorkspaceFolders` flag is set to `true` or `undefined`.
|
||||
4. There are no other routes that match the request.
|
||||
|
||||
You can disable this by passing `development: { chromeDevToolsAutomaticWorkspaceFolders: false }` in `Bun.serve`'s options.
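For example, a minimal sketch of a `Bun.serve` call that opts out of this route (the `fetch` handler is just a placeholder):

```ts
import { serve } from "bun";

serve({
  development: {
    // Disable the /.well-known/appspecific/com.chrome.devtools.json route.
    chromeDevToolsAutomaticWorkspaceFolders: false,
  },
  fetch() {
    return new Response("ok"); // placeholder handler
  },
});
```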
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Keyboard Shortcuts
|
||||
|
||||
While the server is running:
|
||||
|
||||
@@ -40,3 +40,32 @@ At the end, it runs `bun install` to install `@types/bun`.
|
||||
{% /details %}
|
||||
|
||||
{% bunCLIUsage command="init" /%}
|
||||
|
||||
## React
|
||||
|
||||
The `--react` flag will scaffold a React project:
|
||||
|
||||
```bash
|
||||
$ bun init --react
|
||||
```
|
||||
|
||||
The `--react` flag accepts the following values:
|
||||
|
||||
- `tailwind` - Scaffold a React project with Tailwind CSS
|
||||
- `shadcn` - Scaffold a React project with Shadcn/UI and Tailwind CSS
|
||||
|
||||
### React + TailwindCSS
|
||||
|
||||
This will create a React project with Tailwind CSS configured with Bun's bundler and dev server.
|
||||
|
||||
```bash
|
||||
$ bun init --react=tailwind
|
||||
```
|
||||
|
||||
### React + @shadcn/ui
|
||||
|
||||
This will create a React project with shadcn/ui and Tailwind CSS configured with Bun's bundler and dev server.
|
||||
|
||||
```bash
|
||||
$ bun init --react=shadcn
|
||||
```
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
Use `bun publish` to publish a package to the npm registry.
|
||||
|
||||
`bun publish` will automatically pack your package into a tarball, strip workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
|
||||
`bun publish` will automatically pack your package into a tarball, strip catalog and workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
|
||||
|
||||
```sh
|
||||
## Publishing the package from the current working directory
|
||||
|
||||
@@ -1,49 +1,50 @@
|
||||
---
|
||||
name: Use React and JSX
|
||||
name: Build a React app with Bun
|
||||
---
|
||||
|
||||
React just works with Bun. Bun supports `.jsx` and `.tsx` files out of the box.
|
||||
Bun supports `.jsx` and `.tsx` files out of the box. React just works with Bun.
|
||||
|
||||
Remember that JSX is just a special syntax for including HTML-like syntax in JavaScript files. React uses JSX syntax, as do alternatives like [Preact](https://preactjs.com/) and [Solid](https://www.solidjs.com/). Bun's internal transpiler converts JSX syntax into vanilla JavaScript before execution.
|
||||
|
||||
---
|
||||
|
||||
Bun _assumes_ you're using React (unless you [configure it otherwise](https://bun.sh/docs/runtime/bunfig#jsx)) so a line like this:
|
||||
|
||||
```
|
||||
const element = <h1>Hello, world!</h1>;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
is internally converted into something like this:
|
||||
|
||||
```ts
|
||||
// jsxDEV
|
||||
import { jsx } from "react/jsx-dev-runtime";
|
||||
|
||||
const element = jsx("h1", { children: "Hello, world!" });
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
This code requires `react` to run, so make sure you've installed React.
|
||||
Create a new React app with `bun init --react`. This gives you a template with a simple React app and a simple API server together in one full-stack app.
|
||||
|
||||
```bash
|
||||
$ bun install react
|
||||
# Create a new React app
|
||||
$ bun init --react
|
||||
|
||||
# Run the app in development mode
|
||||
$ bun dev
|
||||
|
||||
# Build as a static site for production
|
||||
$ bun run build
|
||||
|
||||
# Run the server in production
|
||||
$ bun start
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Bun implements special logging for JSX components to make debugging easier.
|
||||
### Hot Reloading
|
||||
|
||||
Run `bun dev` to start the app in development mode. This will start the API server and the React app with hot reloading.
|
||||
|
||||
### Full-Stack App
|
||||
|
||||
Run `bun start` to start the API server and frontend together in one process.
|
||||
|
||||
### Static Site
|
||||
|
||||
Run `bun run build` to build the app as a static site. This will create a `dist` directory with the built app and all the assets.
|
||||
|
||||
```bash
|
||||
$ bun run log-my-component.tsx
|
||||
<Component message="Hello world!" />
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
As far as "official support" for React goes, that's it. React is a library like any other, and Bun can run that library. Bun is not a framework, so you should use a framework like [Vite](https://vitejs.dev/) to build an app with server-side rendering and hot reloading in the browser.
|
||||
|
||||
Refer to [Runtime > JSX](https://bun.sh/docs/runtime/jsx) for complete documentation on configuring JSX.
|
||||
├── src/
|
||||
│ ├── index.tsx # Server entry point with API routes
|
||||
│ ├── frontend.tsx # React app entry point with HMR
|
||||
│ ├── App.tsx # Main React component
|
||||
│ ├── APITester.tsx # Component for testing API endpoints
|
||||
│ ├── index.html # HTML template
|
||||
│ ├── index.css # Styles
|
||||
│ └── *.svg # Static assets
|
||||
├── package.json # Dependencies and scripts
|
||||
├── tsconfig.json # TypeScript configuration
|
||||
├── bunfig.toml # Bun configuration
|
||||
└── bun.lock # Lock file
|
||||
```
|
||||
|
||||
37
docs/install/audit.md
Normal file
37
docs/install/audit.md
Normal file
@@ -0,0 +1,37 @@
|
||||
`bun audit` checks your installed packages for known security vulnerabilities.
|
||||
|
||||
Run the command in a project with a `bun.lock` file:
|
||||
|
||||
```bash
|
||||
$ bun audit
|
||||
```
|
||||
|
||||
Bun sends the list of installed packages and their versions to npm and prints a report of any vulnerabilities that were found. Packages installed from registries other than the default registry are skipped.
|
||||
|
||||
If no vulnerabilities are found, the command prints:
|
||||
|
||||
```
|
||||
No vulnerabilities found
|
||||
```
|
||||
|
||||
When vulnerabilities are detected, each affected package is listed along with its severity, a short description, and a link to the advisory. At the end of the report, Bun prints a summary and hints for updating:
|
||||
|
||||
```
|
||||
3 vulnerabilities (1 high, 2 moderate)
|
||||
To update all dependencies to the latest compatible versions:
|
||||
bun update
|
||||
To update all dependencies to the latest versions (including breaking changes):
|
||||
bun update --latest
|
||||
```
|
||||
|
||||
### `--json`
|
||||
|
||||
Use the `--json` flag to print the raw JSON response from the registry instead of the formatted report:
|
||||
|
||||
```bash
|
||||
$ bun audit --json
|
||||
```
|
||||
|
||||
### Exit code
|
||||
|
||||
`bun audit` exits with code `0` if no vulnerabilities are found and `1` if the report lists any vulnerabilities. The exit code behaves the same way when `--json` is passed.
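For example, a CI step could shell out to `bun audit` and act on the exit code. A minimal sketch using `Bun.spawnSync` (the handling below is illustrative):

```ts
// Fail the step when the audit report lists any vulnerabilities.
const result = Bun.spawnSync(["bun", "audit", "--json"]);

if (result.exitCode !== 0) {
  console.error(result.stdout.toString()); // raw JSON report
  process.exit(1);
}

console.log("No vulnerabilities found");
```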
|
||||
296
docs/install/catalogs.md
Normal file
296
docs/install/catalogs.md
Normal file
@@ -0,0 +1,296 @@
|
||||
Catalogs in Bun provide a straightforward way to share common dependency versions across multiple packages in a monorepo. Rather than specifying the same versions repeatedly in each workspace package, you define them once in the root package.json and reference them consistently throughout your project.
|
||||
|
||||
## Overview
|
||||
|
||||
Unlike traditional dependency management where each workspace package needs to independently specify versions, catalogs let you:
|
||||
|
||||
1. Define version catalogs in the root package.json
|
||||
2. Reference these versions with a simple `catalog:` protocol
|
||||
3. Update all packages simultaneously by changing the version in just one place
|
||||
|
||||
This is especially useful in large monorepos where dozens of packages need to use the same version of key dependencies.
|
||||
|
||||
## How to Use Catalogs
|
||||
|
||||
### Directory Structure Example
|
||||
|
||||
Consider a monorepo with the following structure:
|
||||
|
||||
```
|
||||
my-monorepo/
|
||||
├── package.json
|
||||
├── bun.lock
|
||||
└── packages/
|
||||
├── app/
|
||||
│ └── package.json
|
||||
├── ui/
|
||||
│ └── package.json
|
||||
└── utils/
|
||||
└── package.json
|
||||
```
|
||||
|
||||
### 1. Define Catalogs in Root package.json
|
||||
|
||||
In your root-level `package.json`, add a `catalog` or `catalogs` field within the `workspaces` object:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "my-monorepo",
|
||||
"workspaces": {
|
||||
"packages": ["packages/*"],
|
||||
"catalog": {
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0"
|
||||
},
|
||||
"catalogs": {
|
||||
"testing": {
|
||||
"jest": "30.0.0",
|
||||
"testing-library": "14.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Reference Catalog Versions in Workspace Packages
|
||||
|
||||
In your workspace packages, use the `catalog:` protocol to reference versions:
|
||||
|
||||
**packages/app/package.json**
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "app",
|
||||
"dependencies": {
|
||||
"react": "catalog:",
|
||||
"react-dom": "catalog:",
|
||||
"jest": "catalog:testing"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**packages/ui/package.json**
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "ui",
|
||||
"dependencies": {
|
||||
"react": "catalog:",
|
||||
"react-dom": "catalog:"
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "catalog:testing",
|
||||
"testing-library": "catalog:testing"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Run Bun Install
|
||||
|
||||
Run `bun install` to install all dependencies according to the catalog versions.
|
||||
|
||||
## Catalog vs Catalogs
|
||||
|
||||
Bun supports two ways to define catalogs:
|
||||
|
||||
1. **`catalog`** (singular): A single default catalog for commonly used dependencies
|
||||
|
||||
```json
|
||||
"catalog": {
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0"
|
||||
}
|
||||
```
|
||||
|
||||
Reference with simply `catalog:`:
|
||||
|
||||
```json
|
||||
"dependencies": {
|
||||
"react": "catalog:"
|
||||
}
|
||||
```
|
||||
|
||||
2. **`catalogs`** (plural): Multiple named catalogs for grouping dependencies
|
||||
|
||||
```json
|
||||
"catalogs": {
|
||||
"testing": {
|
||||
"jest": "30.0.0"
|
||||
},
|
||||
"ui": {
|
||||
"tailwind": "4.0.0"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Reference with `catalog:<name>`:
|
||||
|
||||
```json
|
||||
"dependencies": {
|
||||
"jest": "catalog:testing",
|
||||
"tailwind": "catalog:ui"
|
||||
}
|
||||
```
|
||||
|
||||
## Benefits of Using Catalogs
|
||||
|
||||
- **Consistency**: Ensures all packages use the same version of critical dependencies
|
||||
- **Maintenance**: Update a dependency version in one place instead of across multiple package.json files
|
||||
- **Clarity**: Makes it obvious which dependencies are standardized across your monorepo
|
||||
- **Simplicity**: No need for complex version resolution strategies or external tools
|
||||
|
||||
## Real-World Example
|
||||
|
||||
Here's a more comprehensive example for a React application:
|
||||
|
||||
**Root package.json**
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "react-monorepo",
|
||||
"workspaces": {
|
||||
"packages": ["packages/*"],
|
||||
"catalog": {
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-router-dom": "^6.15.0"
|
||||
},
|
||||
"catalogs": {
|
||||
"build": {
|
||||
"webpack": "5.88.2",
|
||||
"babel": "7.22.10"
|
||||
},
|
||||
"testing": {
|
||||
"jest": "29.6.2",
|
||||
"react-testing-library": "14.0.0"
|
||||
}
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "5.1.6"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**packages/app/package.json**
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "app",
|
||||
"dependencies": {
|
||||
"react": "catalog:",
|
||||
"react-dom": "catalog:",
|
||||
"react-router-dom": "catalog:",
|
||||
"@monorepo/ui": "workspace:*",
|
||||
"@monorepo/utils": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"webpack": "catalog:build",
|
||||
"babel": "catalog:build",
|
||||
"jest": "catalog:testing",
|
||||
"react-testing-library": "catalog:testing"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**packages/ui/package.json**
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "@monorepo/ui",
|
||||
"dependencies": {
|
||||
"react": "catalog:",
|
||||
"react-dom": "catalog:"
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "catalog:testing",
|
||||
"react-testing-library": "catalog:testing"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**packages/utils/package.json**
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "@monorepo/utils",
|
||||
"dependencies": {
|
||||
"react": "catalog:"
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "catalog:testing"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Updating Versions
|
||||
|
||||
To update versions across all packages, simply change the version in the root package.json:
|
||||
|
||||
```json
|
||||
"catalog": {
|
||||
"react": "^19.1.0", // Updated from ^19.0.0
|
||||
"react-dom": "^19.1.0" // Updated from ^19.0.0
|
||||
}
|
||||
```
|
||||
|
||||
Then run `bun install` to update all packages.
|
||||
|
||||
## Lockfile Integration
|
||||
|
||||
Bun's lockfile tracks catalog versions, making it easy to ensure consistent installations across different environments. The lockfile includes:
|
||||
|
||||
- The catalog definitions from your package.json
|
||||
- The resolution of each cataloged dependency
|
||||
|
||||
```
|
||||
// bun.lock (excerpt)
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "react-monorepo",
|
||||
},
|
||||
"packages/app": {
|
||||
"name": "app",
|
||||
"dependencies": {
|
||||
"react": "catalog:",
|
||||
"react-dom": "catalog:",
|
||||
...
|
||||
},
|
||||
},
|
||||
...
|
||||
},
|
||||
"catalog": {
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
...
|
||||
},
|
||||
"catalogs": {
|
||||
"build": {
|
||||
"webpack": "5.88.2",
|
||||
...
|
||||
},
|
||||
...
|
||||
},
|
||||
"packages": {
|
||||
...
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Limitations and Edge Cases
|
||||
|
||||
- Catalog references must match a dependency defined in either `catalog` or one of the named `catalogs`
|
||||
- Empty or whitespace-only catalog names are ignored (treated as the default catalog)
|
||||
- Invalid dependency versions in catalogs will fail to resolve during `bun install`
|
||||
- Catalogs are only available within workspaces; they cannot be used outside the monorepo
|
||||
|
||||
Bun's catalog system provides a powerful yet simple way to maintain consistency across your monorepo without introducing additional complexity to your workflow.
|
||||
|
||||
## Publishing
|
||||
|
||||
When you run `bun publish` or `bun pm pack`, Bun automatically replaces
|
||||
`catalog:` references in your `package.json` with the resolved version numbers.
|
||||
The published package includes regular semver strings and no longer depends on
|
||||
your catalog definitions.
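As a quick illustration (hypothetical paths), you can verify this after packing by checking the extracted `package.json` for leftover `catalog:` ranges:

```ts
// Assumes `bun pm pack` was run and the tarball was extracted into ./package.
const pkg = await Bun.file("package/package.json").json();

const unresolved = Object.entries(pkg.dependencies ?? {}).filter(([, range]) =>
  String(range).startsWith("catalog:"),
);

console.log(unresolved.length === 0 ? "all catalog ranges resolved" : unresolved);
```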
|
||||
88
docs/install/sfx.md
Normal file
88
docs/install/sfx.md
Normal file
@@ -0,0 +1,88 @@
|
||||
# Self-Extracting Executables for Bun
|
||||
|
||||
Bun provides true self-extracting executables for Linux that require **zero dependencies** - not even a shell. These are native binaries that contain the Bun executable embedded within them.
|
||||
|
||||
## Linux
|
||||
|
||||
### Download and Run
|
||||
|
||||
Download the appropriate executable for your system and run it directly:
|
||||
|
||||
**x64 (with AVX2 support):**
|
||||
|
||||
```bash
|
||||
curl -LO https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64
|
||||
chmod +x bun-linux-x64
|
||||
./bun-linux-x64
|
||||
```
|
||||
|
||||
**x64 (baseline - no AVX2):**
|
||||
|
||||
```bash
|
||||
curl -LO https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-baseline
|
||||
chmod +x bun-linux-x64-baseline
|
||||
./bun-linux-x64-baseline
|
||||
```
|
||||
|
||||
**ARM64/AArch64:**
|
||||
|
||||
```bash
|
||||
curl -LO https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64
|
||||
chmod +x bun-linux-aarch64
|
||||
./bun-linux-aarch64
|
||||
```
|
||||
|
||||
### Custom Installation Directory
|
||||
|
||||
By default, Bun installs to `$HOME/.bun/bin`. You can override this:
|
||||
|
||||
```bash
|
||||
BUN_INSTALL_DIR=/usr/local/bin ./bun-linux-x64
|
||||
```
|
||||
|
||||
## How It Works
|
||||
|
||||
The self-extracting executables are true native binaries:
|
||||
|
||||
1. **Native Binary**: Written in C and compiled to a static executable
|
||||
2. **Embedded Bun**: The Bun executable is compressed and embedded directly in the binary
|
||||
3. **Zero Dependencies**: Statically linked - requires absolutely nothing to run
|
||||
4. **Small Size**: Uses UPX compression to minimize file size
|
||||
|
||||
When you run the executable, it:
|
||||
|
||||
1. Creates the installation directory
|
||||
2. Decompresses the embedded Bun binary
|
||||
3. Writes it to disk with proper permissions
|
||||
4. Verifies the installation
|
||||
5. Provides PATH setup instructions
|
||||
|
||||
## Advantages
|
||||
|
||||
- **Zero Dependencies**: No shell, no tar, no gzip, no base64 - nothing required
|
||||
- **Single File**: One self-contained executable
|
||||
- **Secure**: Checksums provided for verification
|
||||
- **Universal**: Works on any Linux system with the matching architecture
|
||||
- **Fast**: Native code extraction is faster than shell scripts
|
||||
|
||||
## Verification
|
||||
|
||||
Always verify downloads using the provided checksums:
|
||||
|
||||
```bash
|
||||
# Download checksum
|
||||
curl -LO https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.sha256
|
||||
|
||||
# Verify
|
||||
sha256sum -c bun-linux-x64.sha256
|
||||
```
|
||||
|
||||
## Technical Details
|
||||
|
||||
The self-extracting executables are built using:
|
||||
|
||||
- **Language**: C
|
||||
- **Compression**: zlib (gzip compatible)
|
||||
- **Executable Compression**: UPX with LZMA
|
||||
- **Linking**: Static (no shared library dependencies)
|
||||
- **Cross-compilation**: Supports x64 and aarch64 targets
|
||||
@@ -83,6 +83,14 @@ Workspaces have a couple major benefits.
|
||||
- **Dependencies can be de-duplicated.** If `a` and `b` share a common dependency, it will be _hoisted_ to the root `node_modules` directory. This reduces redundant disk usage and minimizes "dependency hell" issues associated with having multiple versions of a package installed simultaneously.
|
||||
- **Run scripts in multiple packages.** You can use the [`--filter` flag](https://bun.sh/docs/cli/filter) to easily run `package.json` scripts in multiple packages in your workspace.
|
||||
|
||||
## Share versions with Catalogs
|
||||
|
||||
When many packages need the same dependency versions, catalogs let you define
|
||||
those versions once in the root `package.json` and reference them from your
|
||||
workspaces using the `catalog:` protocol. Updating the catalog automatically
|
||||
updates every package that references it. See
|
||||
[Catalogs](https://bun.sh/docs/install/catalogs) for details.
|
||||
|
||||
{% callout %}
|
||||
⚡️ **Speed** — Installs are fast, even for big monorepos. Bun installs the [Remix](https://github.com/remix-run/remix) monorepo in about `500ms` on Linux.
|
||||
|
||||
|
||||
@@ -206,7 +206,7 @@ $ iex "& {$(irm https://bun.sh/install.ps1)} -Version $BUN_LATEST_VERSION"
|
||||
|
||||
## Downloading Bun binaries directly
|
||||
|
||||
To download Bun binaries directly, you can visit the [releases page](https://github.com/oven-sh/bun/releases) page on GitHub.
|
||||
To download Bun binaries directly, you can visit the [releases page](https://github.com/oven-sh/bun/releases) on GitHub.
|
||||
|
||||
For convenience, here are download links for the latest version:
|
||||
|
||||
@@ -223,7 +223,16 @@ For convenience, here are download links for the latest version:
|
||||
|
||||
The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.
|
||||
|
||||
Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
|
||||
### CPU requirements and `baseline` builds
|
||||
|
||||
Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install script automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
|
||||
|
||||
| Build        | Intel requirement                                                   | AMD requirement    |
| ------------ | ------------------------------------------------------------------- | ------------------ |
| x64          | Haswell (4th generation Core) or newer, except some low-end models  | Excavator or newer |
| x64-baseline | Nehalem (1st generation Core) or newer                               | Bulldozer or newer |
|
||||
|
||||
Bun does not currently support any CPUs older than the `baseline` target, which mandates the SSE4.2 extension.
|
||||
|
||||
Bun also publishes `darwin-x64-baseline` binaries, but these are just a copy of the `darwin-x64` ones so they still have the same CPU requirement. We only maintain these since some tools expect them to exist. Bun requires macOS 13.0 or later, which does not support any CPUs that don't meet our requirement.
|
||||
|
||||
|
||||
@@ -183,6 +183,9 @@ export default {
|
||||
page("install/workspaces", "Workspaces", {
|
||||
description: "Bun's package manager supports workspaces and monorepo development workflows.",
|
||||
}),
|
||||
page("install/catalogs", "Catalogs", {
|
||||
description: "Use catalogs to share dependency versions between packages in a monorepo.",
|
||||
}),
|
||||
page("install/lifecycle", "Lifecycle scripts", {
|
||||
description: "How Bun handles package lifecycle scripts with trustedDependencies",
|
||||
}),
|
||||
@@ -204,6 +207,9 @@ export default {
|
||||
description:
|
||||
"Patch dependencies in your project to fix bugs or add features without vendoring the entire package.",
|
||||
}),
|
||||
page("install/audit", "Audit dependencies", {
|
||||
description: "Check installed packages for vulnerabilities.",
|
||||
}),
|
||||
page("install/npmrc", ".npmrc support", {
|
||||
description: "Bun supports loading some configuration options from .npmrc",
|
||||
}),
|
||||
@@ -389,7 +395,7 @@ export default {
|
||||
page("api/cc", "C Compiler", {
|
||||
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
|
||||
}), // "`bun:ffi`"),
|
||||
page("api/test", "Testing", {
|
||||
page("cli/test", "Testing", {
|
||||
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
|
||||
}), // "`bun:test`"),
|
||||
page("api/utils", "Utils", {
|
||||
|
||||
@@ -19,31 +19,43 @@ Click the link in the right column to jump to the associated documentation.
|
||||
|
||||
---
|
||||
|
||||
- HTTP server
|
||||
- HTTP Server
|
||||
- [`Bun.serve`](https://bun.sh/docs/api/http#bun-serve)
|
||||
|
||||
---
|
||||
|
||||
- Shell
|
||||
- [`$`](https://bun.sh/docs/runtime/shell)
|
||||
|
||||
---
|
||||
|
||||
- Bundler
|
||||
- [`Bun.build`](https://bun.sh/docs/bundler)
|
||||
|
||||
---
|
||||
|
||||
- File I/O
|
||||
- [`Bun.file`](https://bun.sh/docs/api/file-io#reading-files-bun-file)
|
||||
[`Bun.write`](https://bun.sh/docs/api/file-io#writing-files-bun-write)
|
||||
- [`Bun.file`](https://bun.sh/docs/api/file-io#reading-files-bun-file), [`Bun.write`](https://bun.sh/docs/api/file-io#writing-files-bun-write), `Bun.stdin`, `Bun.stdout`, `Bun.stderr`
|
||||
|
||||
---
|
||||
|
||||
- Child processes
|
||||
- [`Bun.spawn`](https://bun.sh/docs/api/spawn#spawn-a-process-bun-spawn)
|
||||
[`Bun.spawnSync`](https://bun.sh/docs/api/spawn#blocking-api-bun-spawnsync)
|
||||
- Child Processes
|
||||
- [`Bun.spawn`](https://bun.sh/docs/api/spawn#spawn-a-process-bun-spawn), [`Bun.spawnSync`](https://bun.sh/docs/api/spawn#blocking-api-bun-spawnsync)
|
||||
|
||||
---
|
||||
|
||||
- TCP
|
||||
- [`Bun.listen`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
|
||||
[`Bun.connect`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
|
||||
- TCP Sockets
|
||||
- [`Bun.listen`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen), [`Bun.connect`](https://bun.sh/docs/api/tcp#start-a-server-bun-listen)
|
||||
|
||||
---
|
||||
|
||||
- UDP Sockets
|
||||
- [`Bun.udpSocket`](https://bun.sh/docs/api/udp)
|
||||
|
||||
---
|
||||
|
||||
- WebSockets
|
||||
- `new WebSocket()` (client), [`Bun.serve`](https://bun.sh/docs/api/websockets) (server)
|
||||
|
||||
---
|
||||
|
||||
@@ -57,44 +69,53 @@ Click the link in the right column to jump to the associated documentation.
|
||||
|
||||
---
|
||||
|
||||
- Streaming HTML Transformations
|
||||
- Streaming HTML
|
||||
- [`HTMLRewriter`](https://bun.sh/docs/api/html-rewriter)
|
||||
|
||||
---
|
||||
|
||||
- Hashing
|
||||
- [`Bun.hash`](https://bun.sh/docs/api/hashing#bun-hash)
|
||||
[`Bun.CryptoHasher`](https://bun.sh/docs/api/hashing#bun-cryptohasher)
|
||||
- [`Bun.password`](https://bun.sh/docs/api/hashing#bun-password), [`Bun.hash`](https://bun.sh/docs/api/hashing#bun-hash), [`Bun.CryptoHasher`](https://bun.sh/docs/api/hashing#bun-cryptohasher), `Bun.sha`
|
||||
|
||||
---
|
||||
|
||||
- import.meta
|
||||
- [`import.meta`](https://bun.sh/docs/api/import-meta)
|
||||
|
||||
---
|
||||
|
||||
<!-- - [DNS](https://bun.sh/docs/api/dns)
|
||||
- `Bun.dns`
|
||||
|
||||
--- -->
|
||||
|
||||
- SQLite
|
||||
- [`bun:sqlite`](https://bun.sh/docs/api/sqlite)
|
||||
|
||||
---
|
||||
|
||||
- FFI
|
||||
- PostgreSQL Client
|
||||
- [`Bun.SQL`](https://bun.sh/docs/api/sql), `Bun.sql`
|
||||
|
||||
---
|
||||
|
||||
- Redis (Valkey) Client
|
||||
- [`Bun.RedisClient`](https://bun.sh/docs/api/redis), `Bun.redis`
|
||||
|
||||
---
|
||||
|
||||
- FFI (Foreign Function Interface)
|
||||
- [`bun:ffi`](https://bun.sh/docs/api/ffi)
|
||||
|
||||
---
|
||||
|
||||
- DNS
|
||||
- [`Bun.dns.lookup`](https://bun.sh/docs/api/dns), `Bun.dns.prefetch`, `Bun.dns.getCacheStats`
|
||||
|
||||
---
|
||||
|
||||
- Testing
|
||||
- [`bun:test`](https://bun.sh/docs/cli/test)
|
||||
|
||||
---
|
||||
|
||||
- Node-API
|
||||
- [`Node-API`](https://bun.sh/docs/api/node-api)
|
||||
- Workers
|
||||
- [`new Worker()`](https://bun.sh/docs/api/workers)
|
||||
|
||||
---
|
||||
|
||||
- Module Loaders
|
||||
- [`Bun.plugin`](https://bun.sh/docs/bundler/plugins)
|
||||
|
||||
---
|
||||
|
||||
@@ -103,27 +124,84 @@ Click the link in the right column to jump to the associated documentation.
|
||||
|
||||
---
|
||||
|
||||
- Cookies
|
||||
- [`Bun.Cookie`](https://bun.sh/docs/api/cookie), [`Bun.CookieMap`](https://bun.sh/docs/api/cookie)
|
||||
|
||||
---
|
||||
|
||||
- Node-API
|
||||
- [`Node-API`](https://bun.sh/docs/api/node-api)
|
||||
|
||||
---
|
||||
|
||||
- `import.meta`
|
||||
- [`import.meta`](https://bun.sh/docs/api/import-meta)
|
||||
|
||||
---
|
||||
|
||||
- Utilities
|
||||
- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version)
|
||||
[`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision)
|
||||
[`Bun.env`](https://bun.sh/docs/api/utils#bun-env)
|
||||
[`Bun.main`](https://bun.sh/docs/api/utils#bun-main)
|
||||
[`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep)
|
||||
[`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync)
|
||||
[`Bun.which()`](https://bun.sh/docs/api/utils#bun-which)
|
||||
[`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek)
|
||||
[`Bun.openInEditor()`](https://bun.sh/docs/api/utils#bun-openineditor)
|
||||
[`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals)
|
||||
[`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml)
|
||||
[`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath)
|
||||
[`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl)
|
||||
[`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync)
|
||||
[`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync)
|
||||
[`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync)
|
||||
[`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync)
|
||||
[`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect)
|
||||
[`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds)
|
||||
[`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto)
|
||||
[`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync)
|
||||
- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version), [`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision), [`Bun.env`](https://bun.sh/docs/api/utils#bun-env), [`Bun.main`](https://bun.sh/docs/api/utils#bun-main)
|
||||
|
||||
---
|
||||
|
||||
- Sleep & Timing
|
||||
- [`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep), [`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync), [`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds)
|
||||
|
||||
---
|
||||
|
||||
- Random & UUID
|
||||
- [`Bun.randomUUIDv7()`](https://bun.sh/docs/api/utils#bun-randomuuidv7)
|
||||
|
||||
---
|
||||
|
||||
- System & Environment
|
||||
- [`Bun.which()`](https://bun.sh/docs/api/utils#bun-which)
|
||||
|
||||
---
|
||||
|
||||
- Comparison & Inspection
|
||||
- [`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek), [`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals), `Bun.deepMatch`, [`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect)
|
||||
|
||||
---
|
||||
|
||||
- String & Text Processing
|
||||
- [`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml), [`Bun.stringWidth()`](https://bun.sh/docs/api/utils#bun-stringwidth), `Bun.indexOfLine`
|
||||
|
||||
---
|
||||
|
||||
- URL & Path Utilities
|
||||
- [`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath), [`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl)
|
||||
|
||||
---
|
||||
|
||||
- Compression
|
||||
- [`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync), [`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync), [`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync), [`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync), `Bun.zstdCompressSync()`, `Bun.zstdDecompressSync()`, `Bun.zstdCompress()`, `Bun.zstdDecompress()`
|
||||
|
||||
---
|
||||
|
||||
- Stream Processing
|
||||
- [`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto), `Bun.readableStreamToBytes()`, `Bun.readableStreamToBlob()`, `Bun.readableStreamToFormData()`, `Bun.readableStreamToJSON()`, `Bun.readableStreamToArray()`
|
||||
|
||||
---
|
||||
|
||||
- Memory & Buffer Management
|
||||
- `Bun.ArrayBufferSink`, `Bun.allocUnsafe`, `Bun.concatArrayBuffers`
|
||||
|
||||
---
|
||||
|
||||
- Module Resolution
|
||||
- [`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync)
|
||||
|
||||
---
|
||||
|
||||
- Parsing & Formatting
|
||||
- [`Bun.semver`](https://bun.sh/docs/api/semver), `Bun.TOML.parse`, [`Bun.color`](https://bun.sh/docs/api/color)
|
||||
|
||||
---
|
||||
|
||||
- Low-level / Internals
|
||||
- `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.sh/docs/api/bun-jsc)
|
||||
|
||||
---
|
||||
|
||||
{% /table %}
|
||||
|
||||
@@ -76,7 +76,7 @@ The `define` field allows you to replace certain global identifiers with constan
|
||||
|
||||
### `loader`
|
||||
|
||||
Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun. If
|
||||
Configure how Bun maps file extensions to loaders. This is useful for loading files that aren't natively supported by Bun.
|
||||
|
||||
```toml
|
||||
[loader]
|
||||
|
||||
@@ -120,7 +120,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:net`](https://nodejs.org/api/net.html)
|
||||
|
||||
🟡 `SocketAddress` class not exposed (but implemented). `BlockList` exists but is a no-op.
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`node:perf_hooks`](https://nodejs.org/api/perf_hooks.html)
|
||||
|
||||
|
||||
@@ -102,7 +102,7 @@ Once the plugin is registered, `.yaml` and `.yml` files can be directly imported
|
||||
{% codetabs %}
|
||||
|
||||
```ts#index.ts
|
||||
import data from "./data.yml"
|
||||
import * as data from "./data.yml"
|
||||
|
||||
console.log(data);
|
||||
```
|
||||
|
||||
@@ -17,6 +17,7 @@ console.log(Bun.hash.xxHash3(input)); // bigint
|
||||
console.log(Bun.hash.murmur32v3(input)); // number
|
||||
console.log(Bun.hash.murmur32v2(input)); // number
|
||||
console.log(Bun.hash.murmur64v2(input)); // bigint
|
||||
console.log(Bun.hash.rapidhash(input)); // bigint
|
||||
|
||||
// Second argument accepts a seed where relevant
|
||||
console.log(Bun.hash(input, 12345));
|
||||
|
||||
19
misctools/lldb/init.lldb
Normal file
19
misctools/lldb/init.lldb
Normal file
@@ -0,0 +1,19 @@
|
||||
# This file is separate from .lldbinit because it has to be in the same directory as the Python
|
||||
# modules in order for the "attach" action to work.
|
||||
|
||||
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
|
||||
# (-p), but do not stop the process (-s) or notify the user (-n).
|
||||
#
|
||||
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
|
||||
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
|
||||
# it. So stopping the process would just create noise when debugging any long-running script.
|
||||
process handle -p true -s false -n false SIGPWR
|
||||
|
||||
command script import -c lldb_pretty_printers.py
|
||||
type category enable zig.lang
|
||||
type category enable zig.std
|
||||
|
||||
command script import -c lldb_webkit.py
|
||||
|
||||
command script delete btjs
|
||||
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
|
||||
25
package.json
25
package.json
@@ -1,22 +1,14 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.2.14",
|
||||
"version": "1.2.16",
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
"./packages/bun-types",
|
||||
"./packages/@types/bun"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@mdn/browser-compat-data": "~5.5.28",
|
||||
"@types/bun": "*",
|
||||
"@types/react": "^18.3.3",
|
||||
"@typescript-eslint/eslint-plugin": "^7.11.0",
|
||||
"@typescript-eslint/parser": "^7.11.0",
|
||||
"@vscode/debugadapter": "^1.65.0",
|
||||
"autoprefixer": "^10.4.19",
|
||||
"caniuse-lite": "^1.0.30001620",
|
||||
"esbuild": "^0.21.4",
|
||||
"eslint": "^9.4.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
"prettier": "^3.5.3",
|
||||
@@ -27,13 +19,15 @@
|
||||
"typescript": "^5.7.2"
|
||||
},
|
||||
"resolutions": {
|
||||
"bun-types": "workspace:packages/bun-types"
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
"@types/bun": "workspace:packages/@types/bun"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "bun run build:debug",
|
||||
"watch": "zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
|
||||
"watch-windows": "zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
|
||||
"bd": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
|
||||
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
|
||||
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v",
|
||||
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
|
||||
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan",
|
||||
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
|
||||
@@ -48,6 +42,7 @@
|
||||
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release-local",
|
||||
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
|
||||
"run:linux": "docker run --rm -v \"$PWD:/root/bun/\" -w /root/bun ghcr.io/oven-sh/bun-development-docker-image",
|
||||
"css-properties": "bun run src/css/properties/generate_properties.ts",
|
||||
"uv-posix-stubs": "bun run src/bun.js/bindings/libuv/generate_uv_posix_stubs.ts",
|
||||
"bump": "bun ./scripts/bump.ts",
|
||||
@@ -81,6 +76,8 @@
|
||||
"zig-format:check": "bun run analysis:no-llvm --target zig-format-check",
|
||||
"prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
|
||||
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
|
||||
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true"
|
||||
"node:test:cp": "bun ./scripts/fetch-node-test.ts ",
|
||||
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
|
||||
"sync-webkit-source": "bun ./scripts/sync-webkit-source.ts"
|
||||
}
|
||||
}
|
||||
|
||||
21
packages/@types/bun/LICENSE
Normal file
21
packages/@types/bun/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
3
packages/@types/bun/README.md
Normal file
3
packages/@types/bun/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# @types/bun alias
|
||||
|
||||
This is an internal alias of the DefinitelyTyped `@types/bun` package. It exists so that Bun's own CI does not need to download the `@types/bun` package.
|
||||
1
packages/@types/bun/index.d.ts
vendored
Normal file
1
packages/@types/bun/index.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/// <reference types="bun-types" />
|
||||
58
packages/@types/bun/package.json
Normal file
58
packages/@types/bun/package.json
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"name": "@types/bun",
|
||||
"version": "1.2.2",
|
||||
"description": "TypeScript definitions for bun",
|
||||
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/bun",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Jarred Sumner",
|
||||
"githubUsername": "Jarred-Sumner",
|
||||
"url": "https://github.com/Jarred-Sumner"
|
||||
},
|
||||
{
|
||||
"name": "Ashcon Partovi",
|
||||
"githubUsername": "electroid",
|
||||
"url": "https://github.com/electroid"
|
||||
},
|
||||
{
|
||||
"name": "Chloe Caruso",
|
||||
"githubUsername": "paperclover",
|
||||
"url": "https://github.com/paperclover"
|
||||
},
|
||||
{
|
||||
"name": "Robobun",
|
||||
"githubUsername": "robobun",
|
||||
"url": "https://github.com/robobun"
|
||||
},
|
||||
{
|
||||
"name": "Dylan Conway",
|
||||
"githubUsername": "dylan-conway",
|
||||
"url": "https://github.com/dylan-conway"
|
||||
},
|
||||
{
|
||||
"name": "Meghan Denny",
|
||||
"githubUsername": "nektro",
|
||||
"url": "https://github.com/nektro"
|
||||
},
|
||||
{
|
||||
"name": "Michael H",
|
||||
"githubUsername": "RiskyMH",
|
||||
"url": "https://github.com/RiskyMH"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"types": "index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||
"directory": "types/bun"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {
|
||||
"bun-types": "workspace:"
|
||||
},
|
||||
"peerDependencies": {},
|
||||
"typesPublisherContentHash": "caeb56f2d4753af08a8e57856ec5e9d86ad3542da2171a68e19698ec4539995a",
|
||||
"typeScriptVersion": "5.0"
|
||||
}
|
||||
@@ -743,7 +743,7 @@ export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
|
||||
source,
|
||||
request,
|
||||
// It is theoretically possible for a breakpoint to resolve to multiple locations.
|
||||
// In that case, send a seperate `breakpoint` event for each one, excluding the first.
|
||||
// In that case, send a separate `breakpoint` event for each one, excluding the first.
|
||||
notify: i > 0,
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -42,11 +42,11 @@ export default class RuntimeError {
|
||||
original: Error;
|
||||
stack: StackFrame[];
|
||||
|
||||
static from(error: Error) {
|
||||
static from(error: Error): RuntimeError {
|
||||
const runtime = new RuntimeError();
|
||||
runtime.original = error;
|
||||
runtime.stack = this.parseStack(error);
|
||||
return RuntimeError;
|
||||
return runtime;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
340
packages/bun-types/bun.d.ts
vendored
340
packages/bun-types/bun.d.ts
vendored
@@ -1865,6 +1865,7 @@ declare module "bun" {
|
||||
murmur32v3: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
murmur32v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
murmur64v2: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
rapidhash: (data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
}
|
||||
|
||||
type JavaScriptLoader = "jsx" | "js" | "ts" | "tsx";
|
||||
@@ -3304,6 +3305,8 @@ declare module "bun" {
|
||||
interface BunRequest<T extends string = string> extends Request {
|
||||
params: RouterTypes.ExtractRouteParams<T>;
|
||||
readonly cookies: CookieMap;
|
||||
|
||||
clone(): BunRequest<T>;
|
||||
}
|
||||
|
||||
interface GenericServeOptions {
|
||||
@@ -3355,6 +3358,30 @@ declare module "bun" {
|
||||
* @default false
|
||||
*/
|
||||
console?: boolean;
|
||||
|
||||
/**
|
||||
* Enable automatic workspace folders for Chrome DevTools
|
||||
*
|
||||
* This lets you persistently edit files in the browser. It works by adding the following route to the server:
|
||||
* `/.well-known/appspecific/com.chrome.devtools.json`
|
||||
*
|
||||
* The response is a JSON object with the following shape:
|
||||
* ```json
|
||||
* {
|
||||
* "workspace": {
|
||||
* "root": "<cwd>",
|
||||
* "uuid": "<uuid>"
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* The `root` field is the current working directory of the server.
|
||||
* The `"uuid"` field is a hash of the file that started the server and a hash of the current working directory.
|
||||
*
|
||||
* For security reasons, if the remote socket address is not from localhost, 127.0.0.1, or ::1, the request is ignored.
|
||||
* @default true
|
||||
*/
|
||||
chromeDevToolsAutomaticWorkspaceFolders?: boolean;
|
||||
};
|
||||
|
||||
error?: (this: Server, error: ErrorLike) => Response | Promise<Response> | void | Promise<void>;
|
||||
@@ -3660,7 +3687,7 @@ declare module "bun" {
|
||||
* the well-known CAs curated by Mozilla. Mozilla's CAs are completely
|
||||
* replaced when CAs are explicitly specified using this option.
|
||||
*/
|
||||
ca?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
|
||||
ca?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
|
||||
/**
|
||||
* Cert chains in PEM format. One cert chain should be provided per
|
||||
* private key. Each cert chain should consist of the PEM formatted
|
||||
@@ -3672,7 +3699,7 @@ declare module "bun" {
|
||||
* intermediate certificates are not provided, the peer will not be
|
||||
* able to validate the certificate, and the handshake will fail.
|
||||
*/
|
||||
cert?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
|
||||
cert?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
|
||||
/**
|
||||
* Private keys in PEM format. PEM allows the option of private keys
|
||||
* being encrypted. Encrypted keys will be decrypted with
|
||||
@@ -3683,13 +3710,25 @@ declare module "bun" {
|
||||
* object.passphrase is optional. Encrypted keys will be decrypted with
|
||||
* object.passphrase if provided, or options.passphrase if it is not.
|
||||
*/
|
||||
key?: string | Buffer | BunFile | Array<string | Buffer | BunFile> | undefined;
|
||||
key?: string | BufferSource | BunFile | Array<string | BufferSource | BunFile> | undefined;
|
||||
/**
|
||||
* Optionally affect the OpenSSL protocol behavior, which is not
|
||||
* usually necessary. This should be used carefully if at all! Value is
|
||||
* a numeric bitmask of the SSL_OP_* options from OpenSSL Options
|
||||
*/
|
||||
secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options
|
||||
|
||||
keyFile?: string;
|
||||
|
||||
certFile?: string;
|
||||
|
||||
ALPNProtocols?: string | BufferSource;
|
||||
|
||||
ciphers?: string;
|
||||
|
||||
clientRenegotiationLimit?: number;
|
||||
|
||||
clientRenegotiationWindow?: number;
|
||||
}
|
||||
|
||||
// Note for contributors: TLSOptionsAsDeprecated should be considered immutable
|
||||
@@ -5403,6 +5442,42 @@ declare module "bun" {
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
|
||||
/**
|
||||
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
|
||||
* @param data The buffer of data to compress
|
||||
* @param options Compression options to use
|
||||
* @returns The output buffer with the compressed data
|
||||
*/
|
||||
function zstdCompressSync(
|
||||
data: NodeJS.TypedArray | Buffer | string | ArrayBuffer,
|
||||
options?: { level?: number },
|
||||
): Buffer;
|
||||
|
||||
/**
|
||||
* Compresses a chunk of data with the Zstandard (zstd) compression algorithm.
|
||||
* @param data The buffer of data to compress
|
||||
* @param options Compression options to use
|
||||
* @returns A promise that resolves to the output buffer with the compressed data
|
||||
*/
|
||||
function zstdCompress(
|
||||
data: NodeJS.TypedArray | Buffer | string | ArrayBuffer,
|
||||
options?: { level?: number },
|
||||
): Promise<Buffer>;
|
||||
|
||||
/**
|
||||
* Decompresses a chunk of data with the Zstandard (zstd) decompression algorithm.
|
||||
* @param data The buffer of data to decompress
|
||||
* @returns The output buffer with the decompressed data
|
||||
*/
|
||||
function zstdDecompressSync(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Buffer;
|
||||
|
||||
/**
|
||||
* Decompresses a chunk of data with the Zstandard (zstd) decompression algorithm.
|
||||
* @param data The buffer of data to decompress
|
||||
* @returns A promise that resolves to the output buffer with the decompressed data
|
||||
*/
|
||||
function zstdDecompress(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Promise<Buffer>;
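A minimal usage sketch for the zstd helpers declared above (assumes the Bun runtime; not part of the type definitions themselves):

```ts
// Round-trip a string through the zstd helpers.
const compressed = Bun.zstdCompressSync("hello hello hello", { level: 5 });
const restored = await Bun.zstdDecompress(compressed); // Buffer

console.log(restored.toString()); // "hello hello hello"
```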
|
||||
|
||||
type Target =
|
||||
/**
|
||||
* For generating bundles that are intended to be run by the Bun runtime. In many cases,
|
||||
@@ -5797,31 +5872,76 @@ declare module "bun" {
|
||||
index: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a TCP or TLS socket connection used for network communication.
|
||||
* This interface provides methods for reading, writing, managing the connection state,
|
||||
* and handling TLS-specific features if applicable.
|
||||
*
|
||||
* Sockets are created using `Bun.connect()` or accepted by a `Bun.listen()` server.
|
||||
*
|
||||
* @category HTTP & Networking
|
||||
*/
|
||||
interface Socket<Data = undefined> extends Disposable {
|
||||
/**
|
||||
* Write `data` to the socket
|
||||
* Writes `data` to the socket. This method is unbuffered and non-blocking. This uses the `sendto(2)` syscall internally.
|
||||
*
|
||||
* @param data The data to write to the socket
|
||||
* @param byteOffset The offset in the buffer to start writing from (defaults to 0)
|
||||
* @param byteLength The number of bytes to write (defaults to the length of the buffer)
|
||||
* For optimal performance with multiple small writes, consider batching multiple
|
||||
* writes together into a single `socket.write()` call.
|
||||
*
|
||||
* When passed a string, `byteOffset` and `byteLength` refer to the UTF-8 offset, not the string character offset.
|
||||
* @param data The data to write. Can be a string (encoded as UTF-8), `ArrayBuffer`, `TypedArray`, or `DataView`.
|
||||
* @param byteOffset The offset in bytes within the buffer to start writing from. Defaults to 0. Ignored for strings.
|
||||
* @param byteLength The number of bytes to write from the buffer. Defaults to the remaining length of the buffer from the offset. Ignored for strings.
|
||||
* @returns The number of bytes written. Returns `-1` if the socket is closed or shutting down. Can return less than the input size if the socket's buffer is full (backpressure).
|
||||
* @example
|
||||
* ```ts
|
||||
* // Send a string
|
||||
* const bytesWritten = socket.write("Hello, world!\n");
|
||||
*
|
||||
* This is unbuffered as of Bun v0.2.2. That means individual write() calls
|
||||
* will be slow. In the future, Bun will buffer writes and flush them at the
|
||||
* end of the tick, when the event loop is idle, or sooner if the buffer is full.
|
||||
* // Send binary data
|
||||
* const buffer = new Uint8Array([0x01, 0x02, 0x03]);
|
||||
* socket.write(buffer);
|
||||
*
|
||||
* // Send part of a buffer
|
||||
* const largeBuffer = new Uint8Array(1024);
|
||||
* // ... fill largeBuffer ...
|
||||
* socket.write(largeBuffer, 100, 50); // Write 50 bytes starting from index 100
|
||||
* ```
|
||||
*/
|
||||
write(data: string | BufferSource, byteOffset?: number, byteLength?: number): number;
|
||||
|
||||
/**
* The data context for the socket.
* The user-defined data associated with this socket instance.
* This can be set when the socket is created via `Bun.connect({ data: ... })`.
* It can be read or updated at any time.
*
* @example
* ```ts
* // In a socket handler
* function open(socket: Socket<{ userId: string }>) {
* console.log(`Socket opened for user: ${socket.data.userId}`);
* socket.data.lastActivity = Date.now(); // Update data
* }
* ```
*/
data: Data;

/**
* Like {@link Socket.write} except it includes a TCP FIN packet
* Sends the final data chunk and initiates a graceful shutdown of the socket's write side.
* After calling `end()`, no more data can be written using `write()` or `end()`.
* The socket remains readable until the remote end also closes its write side or the connection is terminated.
* This sends a TCP FIN packet after writing the data.
*
* Use it to send your last message and close the connection.
* @param data Optional final data to write before closing. Same types as `write()`.
* @param byteOffset Optional offset for buffer data.
* @param byteLength Optional length for buffer data.
* @returns The number of bytes written for the final chunk. Returns `-1` if the socket was already closed or shutting down.
* @example
* ```ts
* // send some data and close the write side
* socket.end("Goodbye!");
* // or close write side without sending final data
* socket.end();
* ```
*/
end(data?: string | BufferSource, byteOffset?: number, byteLength?: number): number;

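A small sketch (not part of the diff; host and port are placeholders) of `end()` alongside the socket handlers: the final chunk is written, FIN is sent, and the socket stays readable until the peer closes its side.

```ts
const socket = await Bun.connect({
  hostname: "localhost", // placeholder
  port: 4000, // placeholder
  socket: {
    data(_socket, chunk) {
      // Still readable after our end() until the remote side closes too.
      console.log("reply:", chunk.toString());
    },
    close(_socket) {
      console.log("connection fully closed");
    },
  },
});

socket.write("request\n");
const wrote = socket.end("Goodbye!\n"); // write the final chunk, then send FIN
if (wrote === -1) console.warn("socket was already closed or shutting down");
```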
@@ -5848,20 +5968,33 @@ declare module "bun" {
timeout(seconds: number): void;

/**
* Forcefully close the socket. The other end may not receive all data, and
* the socket will be closed immediately.
* Forcefully closes the socket connection immediately. This is an abrupt termination, unlike the graceful shutdown initiated by `end()`.
* It uses `SO_LINGER` with `l_onoff=1` and `l_linger=0` before calling `close(2)`.
* Consider using {@link close close()} or {@link end end()} for graceful shutdowns.
*
* This passes `SO_LINGER` with `l_onoff` set to `1` and `l_linger` set to
* `0` and then calls `close(2)`.
* @example
* ```ts
* socket.terminate();
* ```
*/
terminate(): void;

/**
* Shutdown writes to a socket
* Shuts down the write-half or both halves of the connection.
* This allows the socket to enter a half-closed state where it can still receive data
* but can no longer send data (`halfClose = true`), or close both read and write
* (`halfClose = false`, similar to `end()` but potentially more immediate depending on OS).
* Calls `shutdown(2)` syscall internally.
*
* This makes the socket a half-closed socket. It can still receive data.
* @param halfClose If `true`, only shuts down the write side (allows receiving). If `false` or omitted, shuts down both read and write. Defaults to `false`.
* @example
* ```ts
* // Stop sending data, but allow receiving
* socket.shutdown(true);
*
* This calls [shutdown(2)](https://man7.org/linux/man-pages/man2/shutdown.2.html) internally
* // Shutdown both reading and writing
* socket.shutdown();
* ```
*/
shutdown(halfClose?: boolean): void;

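A sketch of the half-close pattern described above (placeholder host and port): the client stops writing but keeps reading until the server finishes.

```ts
const socket = await Bun.connect({
  hostname: "localhost", // placeholder
  port: 4000, // placeholder
  socket: {
    data(_socket, chunk) {
      console.log("reply chunk:", chunk.length, "bytes");
    },
    end(_socket) {
      // The server closed its write side as well; nothing more to read.
      console.log("server finished");
    },
  },
});

socket.write("SEND-EVERYTHING\n");
socket.shutdown(true); // write side closed, read side stays open
```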
@@ -5887,6 +6020,11 @@ declare module "bun" {

/**
* Flush any buffered data to the socket
* This attempts to send the data immediately, but success depends on the network conditions
* and the receiving end.
* It might be necessary after several `write` calls if immediate sending is critical,
* though often the OS handles flushing efficiently. Note that `write` calls outside
* `open`/`data`/`drain` might benefit from manual `cork`/`flush`.
*/
flush(): void;

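A sketch, assuming `socket` is an already-connected socket from `Bun.connect()`: a few small writes issued outside the `open`/`data`/`drain` handlers, followed by one explicit `flush()` so they leave together.

```ts
// Batch several small writes, then push them out in one go.
for (const line of ["HELO example\r\n", "MAIL FROM:<a@example>\r\n", "RCPT TO:<b@example>\r\n"]) {
  socket.write(line); // placeholder protocol lines
}
socket.flush();
```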
@@ -5908,17 +6046,31 @@ declare module "bun" {

/**
* Remote IP address connected to the socket
* @example "192.168.1.100" | "2001:db8::1"
*/
readonly remoteAddress: string;

/**
* Remote port connected to the socket
* @example 8080
*/
readonly remotePort: number;

/**
* IP protocol family used for the local endpoint of the socket
* @example "IPv4" | "IPv6"
*/
readonly localFamily: "IPv4" | "IPv6";

/**
* Local IP address connected to the socket
* @example "192.168.1.100" | "2001:db8::1"
*/
readonly localAddress: string;

/**
* local port connected to the socket
* @example 8080
*/
readonly localPort: number;

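A sketch (placeholder port) logging both endpoints of an accepted connection with the properties above.

```ts
Bun.listen({
  hostname: "0.0.0.0",
  port: 4000, // placeholder
  socket: {
    open(socket) {
      console.log(
        `${socket.remoteAddress}:${socket.remotePort} -> ` +
          `${socket.localAddress}:${socket.localPort} (${socket.localFamily})`,
      );
    },
    data() {},
  },
});
```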
@@ -6022,7 +6174,7 @@ declare module "bun" {
* certificate.
* @return A certificate object.
*/
getPeerCertificate(): import("tls").PeerCertificate;
getPeerCertificate(): import("node:tls").PeerCertificate;
getPeerX509Certificate(): import("node:crypto").X509Certificate;

/**
@@ -6082,6 +6234,8 @@ declare module "bun" {
/**
* See `Session Resumption` for more information.
* @return `true` if the session was reused, `false` otherwise.
* **TLS Only:** Checks if the current TLS session was resumed from a previous session.
* Returns `true` if the session was resumed, `false` otherwise.
*/
isSessionReused(): boolean;

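A TLS-client sketch (the hostname is a placeholder, and the `handshake` handler receiving a success flag is an assumption of this illustration): once the handshake completes, `isSessionReused()` reports whether the session was resumed.

```ts
await Bun.connect({
  hostname: "example.com", // placeholder
  port: 443,
  tls: true,
  socket: {
    handshake(socket, success) {
      if (success) {
        console.log("session reused:", socket.isSessionReused());
      }
    },
    data() {},
  },
});
```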
@@ -6124,9 +6278,98 @@ declare module "bun" {
setKeepAlive(enable?: boolean, initialDelay?: number): boolean;

/**
* The number of bytes written to the socket.
* The total number of bytes successfully written to the socket since it was established.
* This includes data currently buffered by the OS but not yet acknowledged by the remote peer.
*/
readonly bytesWritten: number;

/**
* Alias for `socket.end()`. Allows the socket to be used with `using` declarations
* for automatic resource management.
* @example
* ```ts
* async function processSocket() {
* using socket = await Bun.connect({ ... });
* socket.write("Data");
* // socket.end() is called automatically when exiting the scope
* }
* ```
*/
[Symbol.dispose](): void;

resume(): void;

pause(): void;

/**
* If this is a TLS Socket
*/
renegotiate(): void;

/**
* Sets the verify mode of the socket.
*
* @param requestCert Whether to request a certificate.
* @param rejectUnauthorized Whether to reject unauthorized certificates.
*/
setVerifyMode(requestCert: boolean, rejectUnauthorized: boolean): void;

getSession(): void;

/**
* Sets the session of the socket.
*
* @param session The session to set.
*/
setSession(session: string | Buffer | BufferSource): void;

/**
* Exports the keying material of the socket.
*
* @param length The length of the keying material to export.
* @param label The label of the keying material to export.
* @param context The context of the keying material to export.
*/
exportKeyingMaterial(length: number, label: string, context?: string | BufferSource): void;

/**
* Upgrades the socket to a TLS socket.
*
* @param options The options for the upgrade.
* @returns A tuple containing the raw socket and the TLS socket.
* @see {@link TLSUpgradeOptions}
*/
upgradeTLS<Data>(options: TLSUpgradeOptions<Data>): [raw: Socket<Data>, tls: Socket<Data>];

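A sketch of a STARTTLS-style upgrade using the `TLSUpgradeOptions` shape declared later in this diff; `socket` is assumed to be an existing plaintext connection and the certificate paths are placeholders.

```ts
const [raw, tls] = socket.upgradeTLS({
  data: socket.data,
  tls: {
    cert: Bun.file("./cert.pem"), // placeholder paths
    key: Bun.file("./key.pem"),
  },
  socket: {
    handshake(_tlsSocket, success) {
      console.log("TLS handshake:", success ? "ok" : "failed");
    },
    data(_tlsSocket, chunk) {
      console.log("decrypted:", chunk.length, "bytes");
    },
  },
});

tls.write("hello over TLS\n"); // application data goes through `tls`
// `raw` still carries the encrypted byte stream underneath
```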
/**
* Closes the socket.
*
* This is a wrapper around `end()` and `shutdown()`.
*
* @see {@link end}
* @see {@link shutdown}
*/
close(): void;

/**
* Returns the servername of the socket.
*
* @see {@link setServername}
*/
getServername(): string;

/**
* Sets the servername of the socket.
*
* @see {@link getServername}
*/
setServername(name: string): void;
}

interface TLSUpgradeOptions<Data> {
data?: Data;
tls: TLSOptions | boolean;
socket: SocketHandler<Data>;
}

interface SocketListener<Data = undefined> extends Disposable {
@@ -6227,6 +6470,22 @@ declare module "bun" {
* The per-instance data context
*/
data?: Data;
/**
* Whether to allow half-open connections.
*
* A half-open connection occurs when one end of the connection has called `close()`
* or sent a FIN packet, while the other end remains open. When set to `true`:
*
* - The socket won't automatically send FIN when the remote side closes its end
* - The local side can continue sending data even after the remote side has closed
* - The application must explicitly call `end()` to fully close the connection
*
* When `false`, the socket automatically closes both ends of the connection when
* either side closes.
*
* @default false
*/
allowHalfOpen?: boolean;
}

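A sketch of opting into the half-open behavior documented in the `allowHalfOpen` option above (placeholder host and port): after the peer sends FIN, the local side can still write and must call `end()` itself.

```ts
await Bun.connect({
  hostname: "localhost", // placeholder
  port: 4000, // placeholder
  allowHalfOpen: true,
  socket: {
    data() {},
    end(socket) {
      // Remote write side is closed, but ours is still open.
      socket.write("final ack\n");
      socket.end();
    },
  },
});
```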
interface TCPSocketListenOptions<Data = undefined> extends SocketOptions<Data> {
@@ -6241,7 +6500,7 @@ declare module "bun" {
/**
* The TLS configuration object with which to create the server
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
/**
* Whether to use exclusive mode.
*
@@ -6287,7 +6546,7 @@ declare module "bun" {
/**
* TLS Configuration with which to create the socket
*/
tls?: boolean;
tls?: TLSOptions | boolean;
/**
* Whether to use exclusive mode.
*
@@ -6303,22 +6562,8 @@ declare module "bun" {
* @default false
*/
exclusive?: boolean;
/**
* Whether to allow half-open connections.
*
* A half-open connection occurs when one end of the connection has called `close()`
* or sent a FIN packet, while the other end remains open. When set to `true`:
*
* - The socket won't automatically send FIN when the remote side closes its end
* - The local side can continue sending data even after the remote side has closed
* - The application must explicitly call `end()` to fully close the connection
*
* When `false` (default), the socket automatically closes both ends of the connection
* when either side closes.
*
* @default false
*/
allowHalfOpen?: boolean;
reusePort?: boolean;
ipv6Only?: boolean;
}

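A sketch of a TLS echo listener using the widened `tls?: TLSOptions | boolean` option above; the certificate paths and port are placeholders.

```ts
Bun.listen({
  hostname: "0.0.0.0",
  port: 4433, // placeholder
  exclusive: true,
  tls: {
    cert: Bun.file("./cert.pem"), // placeholder paths
    key: Bun.file("./key.pem"),
  },
  socket: {
    data(socket, chunk) {
      socket.write(chunk); // echo the decrypted bytes back
    },
  },
});
```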
interface UnixSocketOptions<Data = undefined> extends SocketOptions<Data> {
@@ -6329,14 +6574,14 @@ declare module "bun" {
/**
* TLS Configuration with which to create the socket
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
}

interface FdSocketOptions<Data = undefined> extends SocketOptions<Data> {
/**
* TLS Configuration with which to create the socket
*/
tls?: TLSOptions;
tls?: TLSOptions | boolean;
/**
* The file descriptor to connect to
*/
@@ -6605,7 +6850,7 @@ declare module "bun" {
* incoming messages, and `subprocess.send` can send messages to the subprocess. Messages are serialized
* using the JSC serialize API, which allows for the same types that `postMessage`/`structuredClone` supports.
*
* The subprocess can send and recieve messages by using `process.send` and `process.on("message")`,
* The subprocess can send and receive messages by using `process.send` and `process.on("message")`,
* respectively. This is the same API as what Node.js exposes when `child_process.fork()` is used.
*
* Currently, this is only compatible with processes that are other `bun` instances.
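A sketch of the IPC flow described above, assuming a sibling `child.ts` script (the file name and messages are placeholders).

```ts
// parent.ts
const child = Bun.spawn({
  cmd: ["bun", "child.ts"], // placeholder script
  ipc(message, subprocess) {
    console.log("from child:", message);
    subprocess.send({ reply: "pong" });
  },
});
child.send({ hello: "ping" });

// child.ts (the spawned Bun process):
// process.on("message", message => {
//   console.log("from parent:", message);
//   process.send?.({ got: message });
// });
```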
@@ -7443,9 +7688,16 @@ declare module "bun" {
workspaces: {
[workspace: string]: BunLockFileWorkspacePackage;
};
/** @see https://bun.sh/docs/install/overrides */
overrides?: Record<string, string>;
/** @see https://bun.sh/docs/install/patch */
patchedDependencies?: Record<string, string>;
/** @see https://bun.sh/docs/install/lifecycle#trusteddependencies */
trustedDependencies?: string[];
/** @see https://bun.sh/docs/install/catalogs */
catalog?: Record<string, string>;
/** @see https://bun.sh/docs/install/catalogs */
catalogs?: Record<string, Record<string, string>>;

/**
* ```
packages/bun-types/globals.d.ts (vendored, 3 lines changed)
@@ -10,6 +10,7 @@ declare module "bun" {
|
||||
|
||||
type NodeCryptoWebcryptoSubtleCrypto = import("crypto").webcrypto.SubtleCrypto;
|
||||
type NodeCryptoWebcryptoCryptoKey = import("crypto").webcrypto.CryptoKey;
|
||||
type NodeCryptoWebcryptoCryptoKeyPair = import("crypto").webcrypto.CryptoKeyPair;
|
||||
|
||||
type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;
|
||||
|
||||
@@ -884,6 +885,8 @@ declare var CryptoKey: {
|
||||
new (): CryptoKey;
|
||||
};
|
||||
|
||||
interface CryptoKeyPair extends Bun.__internal.NodeCryptoWebcryptoCryptoKeyPair {}
|
||||
|
||||
interface Position {
|
||||
lineText: string;
|
||||
file: string;
|
||||
|
||||
@@ -18,9 +18,6 @@
|
||||
"@types/node": "*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^1.5.3",
|
||||
"@definitelytyped/dtslint": "^0.0.199",
|
||||
"@definitelytyped/eslint-plugin": "^0.0.197",
|
||||
"typescript": "^5.0.2"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
packages/bun-types/redis.d.ts (vendored, 13 lines changed)
@@ -596,6 +596,19 @@ declare module "bun" {
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: string | ArrayBufferView | Blob): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Ping the server
|
||||
* @returns Promise that resolves with "PONG" if the server is reachable, or throws an error if the server is not reachable
|
||||
*/
|
||||
ping(): Promise<"PONG">;
|
||||
|
||||
/**
|
||||
* Ping the server with a message
|
||||
* @param message The message to send to the server
|
||||
* @returns Promise that resolves with the message if the server is reachable, or throws an error if the server is not reachable
|
||||
*/
|
||||
ping(message: string | ArrayBufferView | Blob): Promise<string>;
|
||||
}
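Not part of the diff: a sketch exercising the `ping()` overloads above, assuming the default client is available as `Bun.redis` and a Redis server is reachable.

```ts
const pong = await Bun.redis.ping(); // "PONG"
const echoed = await Bun.redis.ping("healthy"); // echoes the message back
console.log(pong, echoed);
```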
|
||||
|
||||
/**
|
||||
|
||||
packages/bun-types/test.d.ts (vendored, 22 lines changed)
@@ -88,15 +88,19 @@ declare module "bun:test" {
|
||||
*/
|
||||
export function setSystemTime(now?: Date | number): ThisType<void>;
|
||||
|
||||
interface Jest {
|
||||
restoreAllMocks(): void;
|
||||
clearAllMocks(): void;
|
||||
fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
|
||||
setSystemTime(now?: number | Date): void;
|
||||
setTimeout(milliseconds: number): void;
|
||||
}
|
||||
export const jest: Jest;
|
||||
export namespace jest {
|
||||
function restoreAllMocks(): void;
|
||||
function clearAllMocks(): void;
|
||||
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
|
||||
function setSystemTime(now?: number | Date): void;
|
||||
function setTimeout(milliseconds: number): void;
|
||||
function useFakeTimers(): void;
|
||||
function useRealTimers(): void;
|
||||
function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<Extract<T[K], (...args: any[]) => any>>;
|
||||
|
||||
/**
|
||||
* Constructs the type of a mock function, e.g. the return type of `jest.fn()`.
|
||||
*/
|
||||
@@ -146,7 +150,7 @@ declare module "bun:test" {
|
||||
export function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<T[K] extends (...args: any[]) => any ? T[K] : never>;
|
||||
): Mock<Extract<T[K], (...args: any[]) => any>>;
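A sketch of how the tightened `spyOn` return type above is used in practice: the mock keeps the original implementation until it is replaced or restored.

```ts
import { expect, spyOn, test } from "bun:test";

const math = {
  add(a: number, b: number) {
    return a + b;
  },
};

test("spyOn records calls without changing behavior", () => {
  const addSpy = spyOn(math, "add");
  expect(math.add(2, 3)).toBe(5);
  expect(addSpy).toHaveBeenCalledTimes(1);
  addSpy.mockRestore();
});
```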
|
||||
|
||||
interface FunctionLike {
|
||||
readonly name: string;
|
||||
|
||||
@@ -440,7 +440,7 @@ struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_cont
|
||||
socket->flags.is_paused = 0;
|
||||
socket->flags.is_ipc = 0;
|
||||
socket->connect_state = NULL;
|
||||
|
||||
socket->connect_next = NULL;
|
||||
|
||||
us_internal_socket_context_link_socket(context, socket);
|
||||
|
||||
@@ -459,7 +459,7 @@ static void init_addr_with_port(struct addrinfo* info, int port, struct sockaddr
|
||||
}
|
||||
}
|
||||
|
||||
static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *storage) {
|
||||
static bool try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *storage) {
|
||||
memset(storage, 0, sizeof(struct sockaddr_storage));
|
||||
// Try to parse as IPv4
|
||||
struct sockaddr_in *addr4 = (struct sockaddr_in *)storage;
|
||||
@@ -469,7 +469,7 @@ static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *s
|
||||
#ifdef __APPLE__
|
||||
addr4->sin_len = sizeof(struct sockaddr_in);
|
||||
#endif
|
||||
return 0;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Try to parse as IPv6
|
||||
@@ -480,17 +480,17 @@ static int try_parse_ip(const char *ip_str, int port, struct sockaddr_storage *s
|
||||
#ifdef __APPLE__
|
||||
addr6->sin6_len = sizeof(struct sockaddr_in6);
|
||||
#endif
|
||||
return 0;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// If we reach here, the input is neither IPv4 nor IPv6
|
||||
return 1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size, int* is_connecting) {
|
||||
void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size, int* has_dns_resolved) {
|
||||
#ifndef LIBUS_NO_SSL
|
||||
if (ssl == 1) {
|
||||
return us_internal_ssl_socket_context_connect((struct us_internal_ssl_socket_context_t *) context, host, port, options, socket_ext_size, is_connecting);
|
||||
return us_internal_ssl_socket_context_connect((struct us_internal_ssl_socket_context_t *) context, host, port, options, socket_ext_size, has_dns_resolved);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -498,8 +498,8 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
|
||||
|
||||
// fast path for IP addresses in text form
|
||||
struct sockaddr_storage addr;
|
||||
if (try_parse_ip(host, port, &addr) == 0) {
|
||||
*is_connecting = 1;
|
||||
if (try_parse_ip(host, port, &addr)) {
|
||||
*has_dns_resolved = 1;
|
||||
return us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
|
||||
}
|
||||
|
||||
@@ -518,7 +518,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
|
||||
if (result->entries && result->entries->info.ai_next == NULL) {
|
||||
struct sockaddr_storage addr;
|
||||
init_addr_with_port(&result->entries->info, port, &addr);
|
||||
*is_connecting = 1;
|
||||
*has_dns_resolved = 1;
|
||||
struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
|
||||
Bun__addrinfo_freeRequest(ai_req, s == NULL);
|
||||
return s;
|
||||
|
||||
@@ -213,7 +213,7 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
s->ssl_read_wants_write = 0;
|
||||
s->fatal_error = 0;
|
||||
s->handshake_state = HANDSHAKE_PENDING;
|
||||
|
||||
|
||||
|
||||
SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
|
||||
// if we allow renegotiation, we need to set the mode here
|
||||
@@ -255,7 +255,7 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
}
|
||||
|
||||
/// @brief Complete the shutdown or do a fast shutdown when needed, this should only be called before closing the socket
|
||||
/// @param s
|
||||
/// @param s
|
||||
int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fast_shutdown) {
|
||||
// if we are already shutdown or in the middle of a handshake we dont need to do anything
|
||||
// Scenarios:
|
||||
@@ -265,7 +265,7 @@ int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fa
|
||||
// 4 - we are in the middle of a handshake
|
||||
// 5 - we received a fatal error
|
||||
if(us_internal_ssl_socket_is_shut_down(s) || s->fatal_error || !SSL_is_init_finished(s->ssl)) return 1;
|
||||
|
||||
|
||||
// we are closing the socket but did not sent a shutdown yet
|
||||
int state = SSL_get_shutdown(s->ssl);
|
||||
int sent_shutdown = state & SSL_SENT_SHUTDOWN;
|
||||
@@ -277,7 +277,7 @@ int us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fa
|
||||
// Zero means that we should wait for the peer to close the connection
|
||||
// but we are already closing the connection so we do a fast shutdown here
|
||||
int ret = SSL_shutdown(s->ssl);
|
||||
if(ret == 0 && force_fast_shutdown) {
|
||||
if(ret == 0 && force_fast_shutdown) {
|
||||
// do a fast shutdown (dont wait for peer)
|
||||
ret = SSL_shutdown(s->ssl);
|
||||
}
|
||||
@@ -397,7 +397,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
// nothing todo here, renegotiation must be handled in SSL_read
|
||||
if (s->handshake_state != HANDSHAKE_PENDING)
|
||||
return;
|
||||
|
||||
|
||||
if (us_internal_ssl_socket_is_closed(s) || us_internal_ssl_socket_is_shut_down(s) ||
|
||||
(s->ssl && SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN)) {
|
||||
|
||||
@@ -422,7 +422,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
s->fatal_error = 1;
|
||||
}
|
||||
us_internal_trigger_handshake_callback(s, 0);
|
||||
|
||||
|
||||
return;
|
||||
}
|
||||
s->handshake_state = HANDSHAKE_PENDING;
|
||||
@@ -504,7 +504,7 @@ restart:
|
||||
loop_ssl_data->ssl_read_output +
|
||||
LIBUS_RECV_BUFFER_PADDING + read,
|
||||
LIBUS_RECV_BUFFER_LENGTH - read);
|
||||
|
||||
|
||||
if (just_read <= 0) {
|
||||
int err = SSL_get_error(s->ssl, just_read);
|
||||
// as far as I know these are the only errors we want to handle
|
||||
@@ -603,7 +603,7 @@ restart:
|
||||
goto restart;
|
||||
}
|
||||
}
|
||||
// Trigger writable if we failed last SSL_write with SSL_ERROR_WANT_READ
|
||||
// Trigger writable if we failed last SSL_write with SSL_ERROR_WANT_READ
|
||||
// If we failed SSL_read because we need to write more data (SSL_ERROR_WANT_WRITE) we are not going to trigger on_writable, we will wait until the next on_data or on_writable event
|
||||
// SSL_read will try to flush the write buffer and if fails with SSL_ERROR_WANT_WRITE means the socket is not in a writable state anymore and only makes sense to trigger on_writable if we can write more data
|
||||
// Otherwise we possible would trigger on_writable -> on_data event in a recursive loop
|
||||
@@ -1133,7 +1133,7 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
|
||||
}
|
||||
|
||||
SSL_CTX *create_ssl_context_from_bun_options(
|
||||
struct us_bun_socket_context_options_t options,
|
||||
struct us_bun_socket_context_options_t options,
|
||||
enum create_bun_socket_error_t *err) {
|
||||
ERR_clear_error();
|
||||
|
||||
@@ -1250,8 +1250,8 @@ SSL_CTX *create_ssl_context_from_bun_options(
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// It may return spurious errors here.
|
||||
ERR_clear_error();
|
||||
// It may return spurious errors here.
|
||||
ERR_clear_error();
|
||||
|
||||
if (options.reject_unauthorized) {
|
||||
SSL_CTX_set_verify(ssl_context,
|
||||
@@ -1755,7 +1755,7 @@ int us_internal_ssl_socket_raw_write(struct us_internal_ssl_socket_t *s,
|
||||
|
||||
int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
|
||||
const char *data, int length, int msg_more) {
|
||||
|
||||
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) || length == 0) {
|
||||
return 0;
|
||||
}
|
||||
@@ -1989,7 +1989,7 @@ ssl_wrapped_context_on_end(struct us_internal_ssl_socket_t *s) {
|
||||
if (wrapped_context->events.on_end) {
|
||||
wrapped_context->events.on_end((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -2082,7 +2082,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
struct us_socket_context_t *context = us_create_bun_ssl_socket_context(
|
||||
old_context->loop, sizeof(struct us_wrapped_socket_context_t),
|
||||
options, &err);
|
||||
|
||||
|
||||
// Handle SSL context creation failure
|
||||
if (UNLIKELY(!context)) {
|
||||
return NULL;
|
||||
@@ -2186,4 +2186,4 @@ us_socket_context_on_socket_connect_error(
|
||||
return socket;
|
||||
}
|
||||
|
||||
#endif
|
||||
#endif
|
||||
|
||||
@@ -32,6 +32,11 @@
|
||||
#include <iostream>
|
||||
#include "MoveOnlyFunction.h"
|
||||
#include "HttpParser.h"
|
||||
#include <span>
|
||||
#include <array>
|
||||
#include <mutex>
|
||||
|
||||
|
||||
namespace uWS {
|
||||
template<bool> struct HttpResponse;
|
||||
|
||||
@@ -48,6 +53,78 @@ private:
|
||||
/* Minimum allowed receive throughput per second (clients uploading less than 16kB/sec get dropped) */
|
||||
static constexpr int HTTP_RECEIVE_THROUGHPUT_BYTES = 16 * 1024;
|
||||
|
||||
|
||||
#define FOR_EACH_HTTP_METHOD(MACRO) \
|
||||
MACRO("ACL") \
|
||||
MACRO("BIND") \
|
||||
MACRO("CHECKOUT") \
|
||||
MACRO("CONNECT") \
|
||||
MACRO("COPY") \
|
||||
MACRO("DELETE") \
|
||||
MACRO("GET") \
|
||||
MACRO("HEAD") \
|
||||
MACRO("LINK") \
|
||||
MACRO("LOCK") \
|
||||
MACRO("M-SEARCH") \
|
||||
MACRO("MERGE") \
|
||||
MACRO("MKACTIVITY") \
|
||||
MACRO("MKCALENDAR") \
|
||||
MACRO("MKCOL") \
|
||||
MACRO("MOVE") \
|
||||
MACRO("NOTIFY") \
|
||||
MACRO("OPTIONS") \
|
||||
MACRO("PATCH") \
|
||||
MACRO("POST") \
|
||||
MACRO("PROPFIND") \
|
||||
MACRO("PROPPATCH") \
|
||||
MACRO("PURGE") \
|
||||
MACRO("PUT") \
|
||||
MACRO("QUERY") \
|
||||
MACRO("REBIND") \
|
||||
MACRO("REPORT") \
|
||||
MACRO("SEARCH") \
|
||||
MACRO("SOURCE") \
|
||||
MACRO("SUBSCRIBE") \
|
||||
MACRO("TRACE") \
|
||||
MACRO("UNBIND") \
|
||||
MACRO("UNLINK") \
|
||||
MACRO("UNLOCK") \
|
||||
MACRO("UNSUBSCRIBE") \
|
||||
|
||||
|
||||
#ifndef _WIN32
|
||||
static constexpr std::array<const std::string, 35> HTTP_METHODS = {
|
||||
#define MACRO(name) std::string {name},
|
||||
FOR_EACH_HTTP_METHOD(MACRO)
|
||||
#undef MACRO
|
||||
};
|
||||
static std::span<const std::string> getAllHttpMethods() {
|
||||
return {HTTP_METHODS.data(), HTTP_METHODS.size()};
|
||||
}
|
||||
#else
|
||||
// Windows, and older C++ can't do constexpr std::array<const std::string, 35>
|
||||
static constexpr std::array<const char*, 35> HTTP_METHODS = {
|
||||
#define MACRO(name) name,
|
||||
FOR_EACH_HTTP_METHOD(MACRO)
|
||||
#undef MACRO
|
||||
};
|
||||
|
||||
static std::span<const std::string> getAllHttpMethods() {
|
||||
static std::once_flag flag;
|
||||
static std::array<std::string, 35> methods;
|
||||
std::call_once(flag, []() {
|
||||
methods = {
|
||||
#define MACRO(name) std::string {name},
|
||||
FOR_EACH_HTTP_METHOD(MACRO)
|
||||
#undef MACRO
|
||||
};
|
||||
});
|
||||
return {methods.data(), methods.size()};
|
||||
}
|
||||
#endif
|
||||
#undef FOR_EACH_HTTP_METHOD
|
||||
|
||||
|
||||
us_socket_context_t *getSocketContext() {
|
||||
return (us_socket_context_t *) this;
|
||||
}
|
||||
@@ -504,13 +581,23 @@ public:
|
||||
void onHttp(std::string_view method, std::string_view pattern, MoveOnlyFunction<void(HttpResponse<SSL> *, HttpRequest *)> &&handler, bool upgrade = false) {
|
||||
HttpContextData<SSL> *httpContextData = getSocketContextData();
|
||||
|
||||
std::vector<std::string> methods{std::string(method)};
|
||||
std::span<const std::string> methods;
|
||||
std::array<std::string, 1> methods_buffer;
|
||||
// When it's NOT node:http, allow the uWS default precedence ordering.
|
||||
if (method == "*" && !httpContextData->flags.useStrictMethodValidation) {
|
||||
methods = getAllHttpMethods();
|
||||
} else {
|
||||
methods_buffer[0] = std::string(method);
|
||||
methods = {methods_buffer.data(), 1};
|
||||
}
|
||||
|
||||
uint32_t priority = method == "*" ? httpContextData->currentRouter->LOW_PRIORITY : (upgrade ? httpContextData->currentRouter->HIGH_PRIORITY : httpContextData->currentRouter->MEDIUM_PRIORITY);
|
||||
|
||||
/* If we are passed nullptr then remove this */
|
||||
if (!handler) {
|
||||
httpContextData->currentRouter->remove(methods[0], pattern, priority);
|
||||
for (const auto &method : methods) {
|
||||
httpContextData->currentRouter->remove(method, pattern, priority);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include <span>
|
||||
#include <iostream>
|
||||
|
||||
#include "MoveOnlyFunction.h"
|
||||
@@ -278,7 +278,7 @@ public:
|
||||
}
|
||||
|
||||
/* Adds the corresponding entires in matching tree and handler list */
|
||||
void add(const std::vector<std::string> &methods, std::string_view pattern, MoveOnlyFunction<bool(HttpRouter *)> &&handler, uint32_t priority = MEDIUM_PRIORITY) {
|
||||
void add(const std::span<const std::string> &methods, std::string_view pattern, MoveOnlyFunction<bool(HttpRouter *)> &&handler, uint32_t priority = MEDIUM_PRIORITY) {
|
||||
/* First remove existing handler */
|
||||
remove(methods[0], pattern, priority);
|
||||
|
||||
|
||||
@@ -785,6 +785,23 @@
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"catalog": {
|
||||
"type": "object",
|
||||
"description": "A single default catalog for commonly used dependencies. Referenced with 'catalog:' in workspace package dependencies.",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"catalogs": {
|
||||
"type": "object",
|
||||
"description": "Multiple named catalogs for grouping dependencies. Referenced with 'catalog:catalogName' in workspace package dependencies.",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Version: 7
|
||||
# Version: 8
|
||||
# A script that installs the dependencies needed to build and test Bun.
|
||||
# This should work on Windows 10 or newer with PowerShell.
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# Version: 10
|
||||
# Version: 11
|
||||
|
||||
# A script that installs the dependencies needed to build and test Bun.
|
||||
# This should work on macOS and Linux with a POSIX shell.
|
||||
|
||||
scripts/fetch-node-test.ts (new file, 112 lines)
@@ -0,0 +1,112 @@
|
||||
import { mkdirSync, writeFileSync } from "fs";
|
||||
import path, { dirname, join } from "path";
|
||||
|
||||
const options: RequestInit = {};
|
||||
|
||||
if (process.env.GITHUB_TOKEN) {
|
||||
options.headers = {
|
||||
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
|
||||
};
|
||||
}
|
||||
|
||||
async function fetchNodeTest(testName: string) {
|
||||
const nodeRepoUrl = "https://raw.githubusercontent.com/nodejs/node/main";
|
||||
const extensions = ["js", "mjs", "ts"];
|
||||
const testDirs = ["test/parallel", "test/sequential"];
|
||||
|
||||
// Try different combinations of test name patterns
|
||||
const testNameVariations = [
|
||||
testName,
|
||||
testName.startsWith("test-") ? testName : `test-${testName}`,
|
||||
testName.replace(/^test-/, ""),
|
||||
];
|
||||
|
||||
for (const testDir of testDirs) {
|
||||
for (const nameVariation of testNameVariations) {
|
||||
// Try with extensions
|
||||
for (const ext of extensions) {
|
||||
const testPath = `${testDir}/${nameVariation}.${ext}`;
|
||||
const url = `${nodeRepoUrl}/${testPath}`;
|
||||
|
||||
try {
|
||||
console.log(`Trying: ${url}`);
|
||||
const response = await fetch(url, options);
|
||||
if (response.ok) {
|
||||
const content = await response.text();
|
||||
const localPath = join("test/js/node", testPath);
|
||||
|
||||
// Create directory if it doesn't exist
|
||||
mkdirSync(dirname(localPath), { recursive: true });
|
||||
|
||||
// Write the file
|
||||
writeFileSync(localPath, content);
|
||||
console.log(
|
||||
`✅ Successfully fetched and saved: ${localPath} (${new Intl.NumberFormat("en-US", {
|
||||
notation: "compact",
|
||||
unit: "kilobyte",
|
||||
}).format(Buffer.byteLength(content, "utf-8"))})`,
|
||||
);
|
||||
return localPath;
|
||||
}
|
||||
} catch (error) {
|
||||
// Continue to next variation
|
||||
}
|
||||
}
|
||||
|
||||
// Try without extension
|
||||
const testPath = `${testDir}/${nameVariation}`;
|
||||
const url = `${nodeRepoUrl}/${testPath}`;
|
||||
|
||||
try {
|
||||
console.log(`Trying: ${url}`);
|
||||
const response = await fetch(url, options);
|
||||
if (response.ok) {
|
||||
const content = await response.text();
|
||||
const localPath = join("test/js/node", testPath);
|
||||
|
||||
// Create directory if it doesn't exist
|
||||
mkdirSync(dirname(localPath), { recursive: true });
|
||||
|
||||
// Write the file
|
||||
writeFileSync(localPath, content);
|
||||
console.log(
|
||||
`✅ Successfully fetched and saved: ${localPath} (${new Intl.NumberFormat("en-US", {
|
||||
notation: "compact",
|
||||
unit: "kilobyte",
|
||||
}).format(Buffer.byteLength(content, "utf-8"))})`,
|
||||
);
|
||||
return localPath;
|
||||
}
|
||||
} catch (error) {
|
||||
// Continue to next variation
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`❌ Could not find test: ${testName}`);
|
||||
}
|
||||
|
||||
// Get test name from command line arguments
|
||||
let testName = process.argv[2];
|
||||
|
||||
if (testName.startsWith(path.join(import.meta.dirname, ".."))) {
|
||||
testName = testName.slice(path.join(import.meta.dirname, "..").length);
|
||||
}
|
||||
|
||||
if (testName.startsWith("test/parallel/")) {
|
||||
testName = testName.replace("test/parallel/", "");
|
||||
} else if (testName.startsWith("test/sequential/")) {
|
||||
testName = testName.replace("test/sequential/", "");
|
||||
}
|
||||
|
||||
if (!testName) {
|
||||
console.error("Usage: bun scripts/fetch-node-test.ts <test-name>");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
await fetchNodeTest(testName);
|
||||
} catch (error) {
|
||||
console.error(error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
scripts/gamble.ts (new executable file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env bun
|
||||
// usage: bun scripts/gamble.ts <number of attempts> <timeout in seconds> <command>
|
||||
|
||||
import assert from "node:assert";
|
||||
|
||||
const attempts = parseInt(process.argv[2]);
|
||||
const timeout = parseFloat(process.argv[3]);
|
||||
const argv = process.argv.slice(4);
|
||||
|
||||
let numTimedOut = 0;
|
||||
const signals = new Map<string, number>();
|
||||
const codes = new Map<number, number>();
|
||||
let numOk = 0;
|
||||
|
||||
for (let i = 0; i < attempts; i++) {
|
||||
const proc = Bun.spawn({
|
||||
cmd: argv,
|
||||
timeout: 1000 * timeout,
|
||||
stdin: null,
|
||||
stdout: "ignore",
|
||||
stderr: "pipe",
|
||||
});
|
||||
await proc.exited;
|
||||
const errors = await new Response(proc.stderr).text();
|
||||
|
||||
const { signalCode: signal, exitCode } = proc;
|
||||
|
||||
if (signal === "SIGTERM") {
|
||||
// sent for timeouts
|
||||
numTimedOut += 1;
|
||||
} else if (signal) {
|
||||
const newCount = 1 + (signals.get(signal) ?? 0);
|
||||
signals.set(signal, newCount);
|
||||
} else if (exitCode !== 0) {
|
||||
// if null there should have been a signal
|
||||
assert(exitCode !== null);
|
||||
const newCount = 1 + (codes.get(exitCode) ?? 0);
|
||||
codes.set(exitCode, newCount);
|
||||
} else {
|
||||
numOk += 1;
|
||||
}
|
||||
if (exitCode !== 0) console.log(errors);
|
||||
process.stdout.write(exitCode === 0 ? "." : "!");
|
||||
}
|
||||
process.stdout.write("\n");
|
||||
|
||||
const width = attempts.toString().length;
|
||||
const pad = (num: number): string => num.toString().padStart(width, " ");
|
||||
const green = (text: string) => console.log(`\x1b[32m${text}\x1b[0m`);
|
||||
const red = (text: string) => console.log(`\x1b[31m${text}\x1b[0m`);
|
||||
|
||||
green(`${pad(numOk)}/${attempts} OK`);
|
||||
if (numTimedOut > 0) {
|
||||
red(`${pad(numTimedOut)}/${attempts} timeout`);
|
||||
}
|
||||
for (const [signal, count] of signals.entries()) {
|
||||
red(`${pad(count)}/${attempts} ${signal}`);
|
||||
}
|
||||
for (const [code, count] of codes.entries()) {
|
||||
red(`${pad(count)}/${attempts} code ${code}`);
|
||||
}
|
||||
|
||||
process.exit(numOk === attempts ? 0 : 1);
|
||||
@@ -18,12 +18,14 @@ async function globSources(output, patterns, excludes = []) {
|
||||
}
|
||||
total += paths.length;
|
||||
|
||||
const sources = paths
|
||||
.map(path => normalize(relative(root, path)))
|
||||
.sort((a, b) => a.localeCompare(b))
|
||||
.join("\n");
|
||||
const sources =
|
||||
paths
|
||||
.map(path => normalize(relative(root, path)))
|
||||
.sort((a, b) => a.localeCompare(b))
|
||||
.join("\n")
|
||||
.trim() + "\n";
|
||||
|
||||
await write(join(root, "cmake", output), sources);
|
||||
await write(join(root, "cmake", "sources", output), sources);
|
||||
}
|
||||
|
||||
const input = await file(join(root, "cmake", "Sources.json")).json();
|
||||
|
||||
@@ -858,7 +858,8 @@ function getSshKeys() {
|
||||
const sshFiles = readdirSync(sshPath, { withFileTypes: true, encoding: "utf-8" });
|
||||
const publicPaths = sshFiles
|
||||
.filter(entry => entry.isFile() && entry.name.endsWith(".pub"))
|
||||
.map(({ name }) => join(sshPath, name));
|
||||
.map(({ name }) => join(sshPath, name))
|
||||
.filter(path => !readFile(path, { cache: true }).startsWith("ssh-ed25519"));
|
||||
|
||||
sshKeys.push(
|
||||
...publicPaths.map(publicPath => ({
|
||||
|
||||
scripts/sync-webkit-source.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
import { existsSync } from "node:fs";
|
||||
import { dirname, join } from "node:path";
|
||||
|
||||
const bunRepo = dirname(import.meta.dir);
|
||||
const webkitRepo = join(bunRepo, "vendor/WebKit");
|
||||
if (!existsSync(webkitRepo)) {
|
||||
console.log("could not find WebKit clone");
|
||||
console.log("clone https://github.com/oven-sh/WebKit.git to vendor/WebKit");
|
||||
console.log("or create a symlink/worktree to an existing clone");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
process.chdir(webkitRepo);
|
||||
const checkedOutCommit = (await Bun.$`git rev-parse HEAD`.text()).trim();
|
||||
const cmakeContents = await Bun.file(join(bunRepo, "cmake/tools/SetupWebKit.cmake")).text();
|
||||
const expectedCommit = cmakeContents.match(/set\(WEBKIT_VERSION ([0-9a-f]{40})\)/)![1];
|
||||
|
||||
if (checkedOutCommit == expectedCommit) {
|
||||
console.log(`already at commit ${expectedCommit}`);
|
||||
} else {
|
||||
console.log(`changing from ${checkedOutCommit} to ${expectedCommit}`);
|
||||
await Bun.$`git checkout main`;
|
||||
await Bun.$`git pull`;
|
||||
// it is OK that this leaves you with a detached HEAD
|
||||
await Bun.$`git checkout ${expectedCommit}`;
|
||||
}
|
||||
@@ -290,7 +290,7 @@ export async function spawn(command, options = {}) {
|
||||
if (exitCode !== 0 && isWindows) {
|
||||
const exitReason = getWindowsExitReason(exitCode);
|
||||
if (exitReason) {
|
||||
exitCode = exitReason;
|
||||
signalCode = exitReason;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -386,7 +386,7 @@ export function spawnSync(command, options = {}) {
|
||||
if (exitCode !== 0 && isWindows) {
|
||||
const exitReason = getWindowsExitReason(exitCode);
|
||||
if (exitReason) {
|
||||
exitCode = exitReason;
|
||||
signalCode = exitReason;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -442,9 +442,37 @@ export function spawnSyncSafe(command, options = {}) {
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
export function getWindowsExitReason(exitCode) {
|
||||
const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h";
|
||||
const nthStatus = readFile(ntStatusPath, { cache: true });
|
||||
const windowsKitPath = "C:\\Program Files (x86)\\Windows Kits";
|
||||
if (!existsSync(windowsKitPath)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const windowsKitPaths = readdirSync(windowsKitPath)
|
||||
.filter(filename => isFinite(parseInt(filename)))
|
||||
.sort((a, b) => parseInt(b) - parseInt(a));
|
||||
|
||||
let ntStatusPath;
|
||||
for (const windowsKitPath of windowsKitPaths) {
|
||||
const includePath = `${windowsKitPath}\\Include`;
|
||||
if (!existsSync(includePath)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const windowsSdkPaths = readdirSync(includePath).sort();
|
||||
for (const windowsSdkPath of windowsSdkPaths) {
|
||||
const statusPath = `${includePath}\\${windowsSdkPath}\\shared\\ntstatus.h`;
|
||||
if (existsSync(statusPath)) {
|
||||
ntStatusPath = statusPath;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!ntStatusPath) {
|
||||
return;
|
||||
}
|
||||
|
||||
const nthStatus = readFile(ntStatusPath, { cache: true });
|
||||
const match = nthStatus.match(new RegExp(`(STATUS_\\w+).*0x${exitCode?.toString(16)}`, "i"));
|
||||
if (match) {
|
||||
const [, exitReason] = match;
|
||||
|
||||
scripts/zig-remove-unreferenced-top-level-decls.ts (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
/**
|
||||
* Removes unreferenced top-level const declarations from a Zig file
|
||||
* Handles patterns like: const <IDENTIFIER> = @import(...) or const <IDENTIFIER> = ...
|
||||
*/
|
||||
export function removeUnreferencedImports(content: string): string {
|
||||
let modified = true;
|
||||
let result = content;
|
||||
|
||||
// Keep iterating until no more changes are made
|
||||
while (modified) {
|
||||
modified = false;
|
||||
const lines = result.split("\n");
|
||||
const newLines: string[] = [];
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
|
||||
// Match top-level const declarations: const <IDENTIFIER> = ...
|
||||
const constMatch = line.match(/^const\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=(.*)$/);
|
||||
|
||||
if (constMatch) {
|
||||
const identifier = constMatch[1];
|
||||
const assignmentPart = constMatch[2];
|
||||
|
||||
// Skip lines that contain '{' in the assignment (likely structs/objects)
|
||||
if (assignmentPart.includes("{")) {
|
||||
newLines.push(line);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this identifier is referenced anywhere else in the file
|
||||
const isReferenced = isIdentifierReferenced(identifier, lines, i);
|
||||
|
||||
if (!isReferenced) {
|
||||
// Skip this line (delete it)
|
||||
modified = true;
|
||||
console.log(`Removing unreferenced import: ${identifier}`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
newLines.push(line);
|
||||
}
|
||||
|
||||
result = newLines.join("\n");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an identifier is referenced anywhere in the file except at the declaration line
|
||||
*/
|
||||
function isIdentifierReferenced(identifier: string, lines: string[], declarationLineIndex: number): boolean {
|
||||
// Create a regex that matches the identifier as a whole word
|
||||
// This prevents matching partial words (e.g. "std" shouldn't match "stdx")
|
||||
const identifierRegex = new RegExp(`\\b${escapeRegex(identifier)}\\b`);
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
// Skip the declaration line itself
|
||||
if (i === declarationLineIndex) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const line = lines[i];
|
||||
|
||||
// Check if the identifier appears in this line
|
||||
if (identifierRegex.test(line)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape special regex characters in a string
|
||||
*/
|
||||
function escapeRegex(string: string): string {
|
||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a single Zig file
|
||||
*/
|
||||
export function processZigFile(filePath: string): void {
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf-8");
|
||||
const cleaned = removeUnreferencedImports(content);
|
||||
|
||||
if (content !== cleaned) {
|
||||
fs.writeFileSync(filePath, cleaned);
|
||||
console.log(`Cleaned: ${filePath}`);
|
||||
} else {
|
||||
console.log(`No changes: ${filePath}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error processing ${filePath}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process multiple Zig files or directories
|
||||
*/
|
||||
export function processFiles(paths: string[]): void {
|
||||
for (const inputPath of paths) {
|
||||
const stat = fs.statSync(inputPath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
// Process all .zig files in directory recursively
|
||||
processDirectory(inputPath);
|
||||
} else if (inputPath.endsWith(".zig")) {
|
||||
processZigFile(inputPath);
|
||||
} else {
|
||||
console.warn(`Skipping non-Zig file: ${inputPath}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively process all .zig files in a directory
|
||||
*/
|
||||
function processDirectory(dirPath: string): void {
|
||||
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dirPath, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
processDirectory(fullPath);
|
||||
} else if (entry.name.endsWith(".zig")) {
|
||||
processZigFile(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// CLI usage
|
||||
if (require.main === module) {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.log("Usage: bun zig-remove-unreferenced-top-level-decls.ts <file1.zig> [file2.zig] [directory]...");
|
||||
console.log("");
|
||||
console.log("Examples:");
|
||||
console.log(" bun zig-remove-unreferenced-top-level-decls.ts file.zig");
|
||||
console.log(" bun zig-remove-unreferenced-top-level-decls.ts src/");
|
||||
console.log(" bun zig-remove-unreferenced-top-level-decls.ts file1.zig file2.zig src/");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
processFiles(args);
|
||||
}
|
||||
@@ -3,7 +3,6 @@ const Environment = @import("./env.zig");
|
||||
|
||||
const Output = @import("output.zig");
|
||||
const use_mimalloc = bun.use_mimalloc;
|
||||
const StringTypes = @import("./string_types.zig");
|
||||
const Mimalloc = bun.Mimalloc;
|
||||
const bun = @import("bun");
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const string = bun.string;
|
||||
const ImportRecord = @import("./import_record.zig").ImportRecord;
|
||||
const ImportKind = @import("./import_record.zig").ImportKind;
|
||||
const lol = @import("./deps/lol-html.zig");
|
||||
|
||||
@@ -26,7 +26,6 @@ const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const bun = @import("bun");
|
||||
const assert = bun.assert;
|
||||
const testing = std.testing;
|
||||
const Thread = std.Thread;
|
||||
const Futex = bun.Futex;
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const windows = std.os.windows;
|
||||
const testing = std.testing;
|
||||
const assert = bun.assert;
|
||||
const Progress = @This();
|
||||
const bun = @import("bun");
|
||||
|
||||
@@ -46,7 +46,7 @@ pub const HashType = u32;
|
||||
const no_watch_item: WatchItemIndex = std.math.maxInt(WatchItemIndex);
|
||||
|
||||
/// Initializes a watcher. Each watcher is tied to some context type, which
|
||||
/// recieves watch callbacks on the watcher thread. This function does not
|
||||
/// receives watch callbacks on the watcher thread. This function does not
|
||||
/// actually start the watcher thread.
|
||||
///
|
||||
/// const watcher = try Watcher.init(T, instance_of_t, fs, bun.default_allocator)
|
||||
@@ -670,12 +670,9 @@ const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const string = bun.string;
|
||||
const Output = bun.Output;
|
||||
const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const strings = bun.strings;
|
||||
const stringZ = bun.stringZ;
|
||||
const FeatureFlags = bun.FeatureFlags;
|
||||
const options = @import("./options.zig");
|
||||
const Mutex = bun.Mutex;
|
||||
const Futex = @import("./futex.zig");
|
||||
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
const std = @import("std");
|
||||
|
||||
const FeatureFlags = @import("./feature_flags.zig");
|
||||
const Environment = @import("./env.zig");
|
||||
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
|
||||
const bun = @import("bun");
|
||||
const OOM = bun.OOM;
|
||||
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
const mem = @import("std").mem;
|
||||
const builtin = @import("std").builtin;
|
||||
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const log = bun.Output.scoped(.mimalloc, true);
|
||||
const assert = bun.assert;
|
||||
const Allocator = mem.Allocator;
|
||||
const mimalloc = @import("./mimalloc.zig");
|
||||
const FeatureFlags = @import("../feature_flags.zig");
|
||||
const Environment = @import("../env.zig");
|
||||
|
||||
fn mimalloc_free(
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
const mem = @import("std").mem;
|
||||
const builtin = @import("std").builtin;
|
||||
const std = @import("std");
|
||||
|
||||
const mimalloc = @import("./mimalloc.zig");
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.