Compare commits

..

1 Commits

Author SHA1 Message Date
Don Isaac
a634127dca fix(napi): memory leak when creating strings 2025-02-13 13:23:41 -08:00
1031 changed files with 29783 additions and 96689 deletions

View File

@@ -621,21 +621,6 @@ function getReleaseStep(buildPlatforms, options) {
};
}
/**
* @returns {Step}
*/
function getBenchmarkStep() {
return {
key: "benchmark",
label: "📊",
agents: {
queue: "build-zig",
},
command: "bun .buildkite/scripts/upload-benchmark.ts",
depends_on: [`linux-x64-build-bun`],
};
}
/**
* @typedef {Object} Pipeline
* @property {Step[]} [steps]
@@ -1114,8 +1099,6 @@ async function getPipeline(options = {}) {
steps.push(getReleaseStep(buildPlatforms, options));
}
steps.push(getBenchmarkStep());
/** @type {Map<string, GroupStep>} */
const stepsByGroup = new Map();

View File

@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";
console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
method: "POST",
});
console.log("Got status " + response.status);

View File

@@ -158,36 +158,25 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_discord_announcement() {
local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
if [ -z "$value" ]; then
echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local version="$1"
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local short_sha="${commit:0:7}"
local commit_url="https://github.com/oven-sh/bun/commit/$commit"
local artifact_path="${commit}"
if [ "$version" == "canary" ]; then
local json_payload=$(cat <<EOF
{
"embeds": [{
"title": "New Bun Canary now available",
"description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
"color": 16023551,
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}]
}
EOF
)
curl -H "Content-Type: application/json" \
-d "$json_payload" \
-sf \
"$value" >/dev/null
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
@@ -238,7 +227,7 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_discord_announcement "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {

View File

@@ -1,27 +0,0 @@
---
description: How to build Bun
globs:
---
# How to build Bun
## CMake
Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.
* `CMakeLists.txt`
* `cmake/`
* `Globals.cmake` - macros and functions used by all the other files
* `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
* `CompilerFlags.cmake` - compiler and linker flags used by all the targets
* `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
* `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)
## How to
There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:
```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```

View File

@@ -1,139 +0,0 @@
---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---
# Writing HMR/Dev Server tests
Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
## File Structure
- `test/bake/bake-harness.ts` - shared utilities and test harness
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
- class `Dev` (controls subprocess for dev server)
- class `Client` (controls a happy-dom subprocess for having the page open)
- more helpers
- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. it loads a page and uses IPC to query parts of the page, run javascript, and much more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test on dev and production mode. these tests cannot really test hot reloading for obvious reasons.
## Categories
bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
css.test.ts - CSS tests concern bundling bugs with CSS files
plugins.test.ts - Plugin tests concern plugins in development mode.
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
esm.test.ts - ESM tests are about various esm features in development mode.
html.test.ts - HTML tests are tests relating to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source-maps are correct.
## `devTest` Basics
A test takes in two primary inputs: `files` and `async test(dev) {`
```ts
import { devTest, emptyHtmlFile } from "../bake-harness";
devTest("html file is watched", {
files: {
"index.html": emptyHtmlFile({
scripts: ["/script.ts"],
body: "<h1>Hello</h1>",
}),
"script.ts": `
console.log("hello");
`,
},
async test(dev) {
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.patch("index.html", {
find: "Hello",
replace: "World",
});
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
// Works
await using c = await dev.client("/");
await c.expectMessage("hello");
// Editing HTML reloads
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "World",
replace: "Hello",
});
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "Hello",
replace: "Bar",
});
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("script.ts", {
find: "hello",
replace: "world",
});
});
await c.expectMessage("world");
},
});
```
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload and for all connected clients to receive changes.
When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
Clients have `console.log` instrumented, so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation occurs. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs.
## Testing for bundling errors
By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
```ts
devTest("import then create", {
files: {
"index.html": `
<!DOCTYPE html>
<html>
<head></head>
<body>
<script type="module" src="/script.ts"></script>
</body>
</html>
`,
"script.ts": `
import data from "./data";
console.log(data);
`,
},
async test(dev) {
const c = await dev.client("/", {
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
});
await c.expectReload(async () => {
await dev.write("data.ts", "export default 'data';");
});
await c.expectMessage("data");
},
});
```
Many functions take an options value to allow specifying it will produce errors. For example, this delete is going to cause a resolution failure.
```ts
await dev.delete("other.ts", {
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```

View File

@@ -268,16 +268,16 @@ If there's a class, prototype, and constructor:
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
// ...
```c++
m_JSStatsBigIntClassStructure.initLater(
[](LazyClassStructure::Initializer& init) {
// Call the function to initialize our class structure.
Bun::initJSBigIntStatsClassStructure(init);
});
```
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`.
Then, implement the function that creates the structure:
```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
@@ -296,36 +296,6 @@ void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
}
```
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
void GlobalObject::finishCreation(VM& vm) {
// ...
this.m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
});
```
Then, implement the function that creates the structure:
```c++
Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalObject)
{
// If there is a prototype:
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
// If there is no prototype or it only has
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
init.setPrototype(prototype);
init.setStructure(structure);
init.setConstructor(constructor);
}
```
Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`
```C++

View File

@@ -1,91 +0,0 @@
---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun
## Where tests are found
You'll find all of Bun's tests in the `test/` directory.
* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
* `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue
## How tests are written
Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are tested using Bun's own test runner, `bun test`.
```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";
describe("assert(expr)", () => {
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
expect(() => assert(expr)).not.toThrow();
});
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
expect(() => assert(expr)).toThrow(AssertionError);
});
});
```
## Testing conventions
* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge-cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:
```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";
test("bun --version", async () => {
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
cmd: [bunExe(), "--version"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [ exitCode, stdout, stderr ] = await Promise.all([
exited,
new Response(stdoutStream).text(),
new Response(stderrStream).text(),
]);
expect({ exitCode, stdout, stderr }).toMatchObject({
exitCode: 0,
stdout: expect.stringContaining(Bun.version),
stderr: "",
});
});
```
## Before writing a test
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file

View File

@@ -70,7 +70,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.2.0"
- name: Install Dependencies
run: bun install
- name: Sign Release
@@ -98,7 +98,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.2.0"
- name: Install Dependencies
run: bun install
- name: Release
@@ -127,7 +127,7 @@ jobs:
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.2.0"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
@@ -167,16 +167,12 @@ jobs:
permissions:
contents: read
steps:
- name: Checkout (DefinitelyTyped)
- name: Checkout
uses: actions/checkout@v4
with:
repository: DefinitelyTyped/DefinitelyTyped
- name: Checkout (bun)
uses: actions/checkout@v4
with:
path: bun
- name: Setup Bun
uses: ./bun/.github/actions/setup-bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.0"
- id: bun-version
@@ -231,7 +227,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
@@ -239,7 +235,7 @@ jobs:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
@@ -256,7 +252,7 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64

2
.gitignore vendored
View File

@@ -181,5 +181,3 @@ tmp
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
*.bun-build

View File

@@ -36,7 +36,6 @@
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {

View File

@@ -67,7 +67,7 @@ $ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
```
```bash#Arch
$ sudo pacman -S llvm clang18 lld
$ sudo pacman -S llvm clang lld
```
```bash#Fedora

2
LATEST
View File

@@ -1 +1 @@
1.2.4
1.2.2

View File

@@ -1154,7 +1154,7 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
@@ -1205,7 +1205,7 @@ jsc-copy-headers-debug:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h

View File

@@ -1,27 +1,19 @@
import { Glob } from "bun";
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
function benchPattern(name, glob, pattern) {
bench(name, () => {
new Glob(glob).match(pattern);
})
}
benchPattern("max-depth" , "1{2,3{4,5{6,7{8,9{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "13579bdfhjlm");
benchPattern("non-ascii", "😎/¢£.{ts,tsx,js,jsx}", "😎/¢£.jsx");
benchPattern("utf8", "フォルダ/**/*", "フォルダ/aaa.js");
benchPattern("non-ascii+max-depth" , "1{2,3{4,5{6,7{8,😎{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "1357😎bdfhjlm");
benchPattern("pretty-average", "test/{foo/**,bar}/baz", "test/bar/baz");
benchPattern("pretty-average-2", "a/**/c/*.md", "a/bb.bb/aa/b.b/aa/c/xyz.md");
benchPattern("pretty-average-3", "a/b/**/c{d,e}/**/xyz.md", "a/b/cd/xyz.md");
benchPattern("pretty-average-4", "foo/bar/**/one/**/*.*", "foo/bar/baz/one/two/three/image.png");
benchPattern("long-pretty-average", "some/**/needle.{js,tsx,mdx,ts,jsx,txt}", "some/a/bigger/path/to/the/crazy/needle.txt");
benchPattern("brackets-lots", "f[^eiu][^eiu][^eiu][^eiu][^eiu]r", "foo-bar");
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
doMatch("foo/bar.js", "**/*.js");
});
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
doMatch("bar.js", "*.js");
});
await run({
min_max: true,
percentiles: true,
avg: true,
})
avg: true,
min_max: true,
percentiles: true,
});

View File

@@ -1,19 +0,0 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
doMatch("foo/bar.js", "**/*.js");
});
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
doMatch("bar.js", "*.js");
});
await run({
avg: true,
min_max: true,
percentiles: true,
});

View File

@@ -1,17 +0,0 @@
const buf = Buffer.from(
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
);
const INTERVAL = 9_999_999;
const time = (name, fn) => {
for (let i = 0; i < INTERVAL; i++) fn();
console.time(name.padEnd(30));
for (let i = 0; i < INTERVAL; i++) fn();
console.timeEnd(name.padEnd(30));
};
console.log(`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`, "\n");
time("includes true", () => buf.includes("nisi"));
time("includes false", () => buf.includes("oopwo"));

View File

@@ -1,71 +0,0 @@
import { bench, run } from "../runner.mjs";
let decodeURIComponentSIMD;
if (typeof Bun !== "undefined") {
({ decodeURIComponentSIMD } = await import("bun:internal-for-testing"));
}
const hugeText = Buffer.alloc(1000000, "Hello, world!").toString();
const hugeTextWithPercentAtEnd = Buffer.alloc(1000000, "Hello, world!%40").toString();
const tinyText = Buffer.alloc(100, "Hello, world!").toString();
const tinyTextWithPercentAtEnd = Buffer.alloc(100, "Hello, world!%40").toString();
const veryTinyText = Buffer.alloc(8, "a").toString();
const veryTinyTextWithPercentAtEnd = Buffer.alloc(8, "a%40").toString();
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - no % x 8 bytes", () => {
decodeURIComponentSIMD(veryTinyText);
});
bench(" decodeURIComponent - no % x 8 bytes", () => {
decodeURIComponent(veryTinyText);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - yes % x 8 bytes", () => {
decodeURIComponentSIMD(veryTinyTextWithPercentAtEnd);
});
bench(" decodeURIComponent - yes % x 8 bytes", () => {
decodeURIComponent(veryTinyTextWithPercentAtEnd);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - no % x 100 bytes", () => {
decodeURIComponentSIMD(tinyText);
});
bench(" decodeURIComponent - no % x 100 bytes", () => {
decodeURIComponent(tinyText);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - yes % x 100 bytes", () => {
decodeURIComponentSIMD(tinyTextWithPercentAtEnd);
});
bench(" decodeURIComponent - yes % x 100 bytes", () => {
decodeURIComponent(tinyTextWithPercentAtEnd);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - no % x 1 MB", () => {
decodeURIComponentSIMD(hugeText);
});
bench(" decodeURIComponent - no % x 1 MB", () => {
decodeURIComponent(hugeText);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - yes % x 1 MB", () => {
decodeURIComponentSIMD(hugeTextWithPercentAtEnd);
});
bench(" decodeURIComponent - yes % x 1 MB", () => {
decodeURIComponent(hugeTextWithPercentAtEnd);
});
await run();

View File

@@ -25,10 +25,9 @@ comptime {
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
@compileError(
"" ++
"Bun requires Zig version " ++ recommended_zig_version ++ " (found " ++
builtin.zig_version_string ++ "). This is " ++
"automatically configured via Bun's CMake setup. You likely meant to run " ++
"`bun setup`. If you are trying to upgrade the Zig compiler, " ++
"Bun requires Zig version " ++ recommended_zig_version ++ ". This is" ++
"automatically configured via Bun's CMake setup. You likely meant to run" ++
"`bun setup`. If you are trying to upgrade the Zig compiler," ++
"run `./scripts/download-zig.sh master` or comment this message out.",
);
}

View File

@@ -45,21 +45,21 @@
"packages": {
"@biomejs/biome": ["@biomejs/biome@1.8.3", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.8.3", "@biomejs/cli-darwin-x64": "1.8.3", "@biomejs/cli-linux-arm64": "1.8.3", "@biomejs/cli-linux-arm64-musl": "1.8.3", "@biomejs/cli-linux-x64": "1.8.3", "@biomejs/cli-linux-x64-musl": "1.8.3", "@biomejs/cli-win32-arm64": "1.8.3", "@biomejs/cli-win32-x64": "1.8.3" }, "bin": { "biome": "bin/biome" } }, "sha512-/uUV3MV+vyAczO+vKrPdOW0Iaet7UnJMU4bNMinggGJTAnBPjCoLEYcyYtYHNnUNYlv4xZMH6hVIQCAozq8d5w=="],
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.8.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-9DYOjclFpKrH/m1Oz75SSExR8VKvNSSsLnVIqdnKexj6NwmiMlKk94Wa1kZEdv6MCOHGHgyyoV57Cw8WzL5n3A=="],
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.8.3", "", { "os":"darwin", "cpu":"arm64" }, "sha512-9DYOjclFpKrH/m1Oz75SSExR8VKvNSSsLnVIqdnKexj6NwmiMlKk94Wa1kZEdv6MCOHGHgyyoV57Cw8WzL5n3A=="],
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.8.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-UeW44L/AtbmOF7KXLCoM+9PSgPo0IDcyEUfIoOXYeANaNXXf9mLUwV1GeF2OWjyic5zj6CnAJ9uzk2LT3v/wAw=="],
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.8.3", "", { "os":"darwin", "cpu":"x64" }, "sha512-UeW44L/AtbmOF7KXLCoM+9PSgPo0IDcyEUfIoOXYeANaNXXf9mLUwV1GeF2OWjyic5zj6CnAJ9uzk2LT3v/wAw=="],
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.8.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-fed2ji8s+I/m8upWpTJGanqiJ0rnlHOK3DdxsyVLZQ8ClY6qLuPc9uehCREBifRJLl/iJyQpHIRufLDeotsPtw=="],
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.8.3", "", { "os":"linux", "cpu":"arm64" }, "sha512-fed2ji8s+I/m8upWpTJGanqiJ0rnlHOK3DdxsyVLZQ8ClY6qLuPc9uehCREBifRJLl/iJyQpHIRufLDeotsPtw=="],
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.8.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-9yjUfOFN7wrYsXt/T/gEWfvVxKlnh3yBpnScw98IF+oOeCYb5/b/+K7YNqKROV2i1DlMjg9g/EcN9wvj+NkMuQ=="],
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.8.3", "", { "os":"linux", "cpu":"arm64" }, "sha512-9yjUfOFN7wrYsXt/T/gEWfvVxKlnh3yBpnScw98IF+oOeCYb5/b/+K7YNqKROV2i1DlMjg9g/EcN9wvj+NkMuQ=="],
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.8.3", "", { "os": "linux", "cpu": "x64" }, "sha512-I8G2QmuE1teISyT8ie1HXsjFRz9L1m5n83U1O6m30Kw+kPMPSKjag6QGUn+sXT8V+XWIZxFFBoTDEDZW2KPDDw=="],
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.8.3", "", { "os":"linux", "cpu":"x64" }, "sha512-I8G2QmuE1teISyT8ie1HXsjFRz9L1m5n83U1O6m30Kw+kPMPSKjag6QGUn+sXT8V+XWIZxFFBoTDEDZW2KPDDw=="],
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.8.3", "", { "os": "linux", "cpu": "x64" }, "sha512-UHrGJX7PrKMKzPGoEsooKC9jXJMa28TUSMjcIlbDnIO4EAavCoVmNQaIuUSH0Ls2mpGMwUIf+aZJv657zfWWjA=="],
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.8.3", "", { "os":"linux", "cpu":"x64" }, "sha512-UHrGJX7PrKMKzPGoEsooKC9jXJMa28TUSMjcIlbDnIO4EAavCoVmNQaIuUSH0Ls2mpGMwUIf+aZJv657zfWWjA=="],
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.8.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-J+Hu9WvrBevfy06eU1Na0lpc7uR9tibm9maHynLIoAjLZpQU3IW+OKHUtyL8p6/3pT2Ju5t5emReeIS2SAxhkQ=="],
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.8.3", "", { "os":"win32", "cpu":"arm64" }, "sha512-J+Hu9WvrBevfy06eU1Na0lpc7uR9tibm9maHynLIoAjLZpQU3IW+OKHUtyL8p6/3pT2Ju5t5emReeIS2SAxhkQ=="],
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.8.3", "", { "os": "win32", "cpu": "x64" }, "sha512-/PJ59vA1pnQeKahemaQf4Nyj7IKUvGQSc3Ze1uIGi+Wvr1xF7rGobSrAAG01T/gUDG21vkDsZYM03NAmPiVkqg=="],
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.8.3", "", { "os":"win32", "cpu":"x64" }, "sha512-/PJ59vA1pnQeKahemaQf4Nyj7IKUvGQSc3Ze1uIGi+Wvr1xF7rGobSrAAG01T/gUDG21vkDsZYM03NAmPiVkqg=="],
"@definitelytyped/dts-critic": ["@definitelytyped/dts-critic@0.0.191", "", { "dependencies": { "@definitelytyped/header-parser": "0.0.190", "command-exists": "^1.2.9", "semver": "^7.5.4", "tmp": "^0.2.1", "typescript": "^5.2.2", "yargs": "^17.7.2" } }, "sha512-j5HK3pQYiQwSXRLJzyhXJ6KxdzLl4gXXhz3ysCtLnRQkj+zsEfloDkEZ3x2bZMWS0OsKLXmR91JeQ2/c9DFEjg=="],
@@ -75,51 +75,51 @@
"@es-joy/jsdoccomment": ["@es-joy/jsdoccomment@0.39.4", "", { "dependencies": { "comment-parser": "1.3.1", "esquery": "^1.5.0", "jsdoc-type-pratt-parser": "~4.0.0" } }, "sha512-Jvw915fjqQct445+yron7Dufix9A+m9j1fCJYlCo1FWlRvTxa3pjJelxdSTdaLWcTwRU6vbL+NYjO4YuNIS5Qg=="],
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os":"aix", "cpu":"ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
"@esbuild/android-arm": ["@esbuild/android-arm@0.21.5", "", { "os": "android", "cpu": "arm" }, "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg=="],
"@esbuild/android-arm": ["@esbuild/android-arm@0.21.5", "", { "os":"android", "cpu":"arm" }, "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg=="],
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.21.5", "", { "os": "android", "cpu": "arm64" }, "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A=="],
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.21.5", "", { "os":"android", "cpu":"arm64" }, "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A=="],
"@esbuild/android-x64": ["@esbuild/android-x64@0.21.5", "", { "os": "android", "cpu": "x64" }, "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA=="],
"@esbuild/android-x64": ["@esbuild/android-x64@0.21.5", "", { "os":"android", "cpu":"x64" }, "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA=="],
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.21.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ=="],
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.21.5", "", { "os":"darwin", "cpu":"arm64" }, "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ=="],
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.21.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw=="],
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.21.5", "", { "os":"darwin", "cpu":"x64" }, "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw=="],
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.21.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g=="],
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.21.5", "", { "os":"freebsd", "cpu":"arm64" }, "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g=="],
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.21.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ=="],
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.21.5", "", { "os":"freebsd", "cpu":"x64" }, "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ=="],
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.21.5", "", { "os": "linux", "cpu": "arm" }, "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA=="],
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.21.5", "", { "os":"linux", "cpu":"arm" }, "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA=="],
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.21.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q=="],
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.21.5", "", { "os":"linux", "cpu":"arm64" }, "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q=="],
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.21.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg=="],
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.21.5", "", { "os":"linux", "cpu":"ia32" }, "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg=="],
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg=="],
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.21.5", "", { "os":"linux", "cpu":"none" }, "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg=="],
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg=="],
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.21.5", "", { "os":"linux", "cpu":"none" }, "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg=="],
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.21.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w=="],
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.21.5", "", { "os":"linux", "cpu":"ppc64" }, "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w=="],
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA=="],
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.21.5", "", { "os":"linux", "cpu":"none" }, "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA=="],
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.21.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A=="],
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.21.5", "", { "os":"linux", "cpu":"s390x" }, "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A=="],
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.21.5", "", { "os": "linux", "cpu": "x64" }, "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ=="],
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.21.5", "", { "os":"linux", "cpu":"x64" }, "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ=="],
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.21.5", "", { "os": "none", "cpu": "x64" }, "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg=="],
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.21.5", "", { "os":"none", "cpu":"x64" }, "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg=="],
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.21.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow=="],
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.21.5", "", { "os":"openbsd", "cpu":"x64" }, "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow=="],
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.21.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg=="],
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.21.5", "", { "os":"sunos", "cpu":"x64" }, "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg=="],
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.21.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A=="],
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.21.5", "", { "os":"win32", "cpu":"arm64" }, "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A=="],
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.21.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA=="],
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.21.5", "", { "os":"win32", "cpu":"ia32" }, "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA=="],
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os":"win32", "cpu":"x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
"@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.0", "", { "dependencies": { "eslint-visitor-keys": "^3.3.0" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA=="],

84
ci/README.md Normal file
View File

@@ -0,0 +1,84 @@
# CI
This directory contains scripts for building CI images for Bun.
## Building
### `macOS`
On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs to run macOS VMs.
To install the dependencies required, run:
```sh
$ cd ci
$ bun run bootstrap
```
To build a vanilla macOS VM, run:
```sh
$ bun run build:darwin-aarch64-vanilla
```
This builds a vanilla macOS VM using the macOS release currently installed on your machine. It runs scripts to disable features such as Spotlight and Siri, but it does not install any software.
> Note: The image size is 50GB, so make sure you have enough disk space.
If you want to build a specific macOS release, you can run:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: You cannot build a newer release of macOS on an older macOS machine.
To build a macOS VM with software installed to build and test Bun, run:
```sh
$ bun run build:darwin-aarch64
```
## Running
### `macOS`
## How To
### Support a new macOS release
1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.
2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:
```hcl
sonoma = {
distro = "sonoma"
release = "15"
ipsw = "https://updates.cdn-apple.com/..."
}
```
3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.
4. Test and build the non-vanilla image:
```sh
$ bun run build:darwin-aarch64-15
```
This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.
5. Publish the images:
```sh
$ bun run login
$ bun run publish:darwin-aarch64-vanilla-15
$ bun run publish:darwin-aarch64-15
```

View File

@@ -0,0 +1,22 @@
# Build stage: bootstrap an Alpine toolchain and compile a release build of bun.
FROM alpine:edge AS build
ARG GIT_SHA
ENV GIT_SHA=${GIT_SHA}
WORKDIR /app/bun
ENV HOME=/root
COPY . .
# bootstrap.sh writes toolchain setup into ~/.bashrc, so make sure it exists first.
RUN touch $HOME/.bashrc
RUN ./scripts/bootstrap.sh
RUN . $HOME/.bashrc && bun run build:release
# Sanity checks: binary type, dynamic dependencies, and that it actually runs.
RUN apk add file
RUN file ./build/release/bun
RUN ldd ./build/release/bun
RUN ./build/release/bun
# 'cp' with multiple sources requires the target directory to exist already;
# create /output explicitly so the copy cannot fail (or silently produce a
# mis-nested /output when build/ happens to contain a single entry).
RUN mkdir -p /output && cp -R /app/bun/build/* /output
# Artifact stage: export only the build output.
FROM scratch AS artifact
COPY --from=build /output /
# docker build -f ./ci/alpine/build.Dockerfile --progress=plain --build-arg GIT_SHA="$(git rev-parse HEAD)" --target=artifact --output type=local,dest=./build-alpine .

20
ci/alpine/test.Dockerfile Normal file
View File

@@ -0,0 +1,20 @@
# Runs Bun's test suite on Alpine against a binary produced by build.Dockerfile
# (copied from the local ./build-alpine output directory).
FROM alpine:edge
ENV HOME=/root
WORKDIR /root
COPY ./build-alpine/release/bun .
# Bring in the repo's tests, runner scripts, and package metadata.
COPY ./test ./test
COPY ./scripts ./scripts
COPY ./package.json ./package.json
COPY ./packages ./packages
# Tools required by the test runner and native test fixtures.
RUN apk update
RUN apk add nodejs lsb-release-minimal git python3 npm make g++
# Sanity checks: binary type, dynamic dependencies, and that it runs.
RUN apk add file
RUN file /root/bun
RUN ldd /root/bun
RUN /root/bun
# Run the test suite against the freshly built binary.
RUN ./scripts/runner.node.mjs --exec-path /root/bun
# docker build -f ./ci/alpine/test.Dockerfile --progress=plain .

View File

@@ -0,0 +1,46 @@
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
# See scripts/setup-login.sh and scripts/optimize-machine.sh for details.
# Render the installer-GUI keystrokes by templating boot-image.sh with the
# packer variables (release, username, password).
data "external-raw" "boot-script" {
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
}
# Install macOS from the selected release's IPSW into a fresh VM image.
source "tart-cli" "bun-darwin-aarch64-vanilla" {
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
from_ipsw = local.release.ipsw
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
create_grace_time = "30s"
# Feed the generated keystrokes to the installer, one command per line.
boot_command = split("\n", data.external-raw.boot-script.result)
headless = true # Disable if you need to debug why the boot_command is not working
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
# Enable auto-login and passwordless sudo inside the VM.
provisioner "file" {
content = file("scripts/setup-login.sh")
destination = "/tmp/setup-login.sh"
}
provisioner "shell" {
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
}
# Disable Spotlight, sleep, software updates, etc. for stable CI behavior.
provisioner "file" {
content = file("scripts/optimize-machine.sh")
destination = "/tmp/optimize-machine.sh"
}
provisioner "shell" {
inline = ["sudo sh /tmp/optimize-machine.sh"]
}
# Remove the provisioning scripts from the image.
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

44
ci/darwin/image.pkr.hcl Normal file
View File

@@ -0,0 +1,44 @@
# Generates a macOS VM with software installed to build and test Bun.
# Starts from the vanilla image produced by image-vanilla.pkr.hcl
# (matched by vm_base_name below).
source "tart-cli" "bun-darwin-aarch64" {
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
headless = true
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64"]
# Install build/test dependencies via the repo's bootstrap script.
provisioner "file" {
content = file("../../scripts/bootstrap.sh")
destination = "/tmp/bootstrap.sh"
}
provisioner "shell" {
inline = ["CI=true sh /tmp/bootstrap.sh"]
}
# Install the launchd daemons (buildkite-agent, tailscale) shipped in darwin/plists/.
provisioner "file" {
source = "darwin/plists/"
destination = "/tmp/"
}
provisioner "shell" {
inline = [
"sudo ls /tmp/",
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
]
}
# Remove provisioning artifacts from the image.
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

View File

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- launchd job that starts the Buildkite agent on the CI VM.
     Installed into /Library/LaunchDaemons by ci/darwin/image.pkr.hcl. -->
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.buildkite.buildkite-agent</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/buildkite-agent</string>
<string>start</string>
</array>
<!-- Restart the agent whenever it exits unsuccessfully. -->
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false />
</dict>
<key>RunAtLoad</key>
<true />
<!-- stdout and stderr share a single log file. -->
<key>StandardOutPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>StandardErrorPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>EnvironmentVariables</key>
<dict>
<key>BUILDKITE_AGENT_CONFIG</key>
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
</dict>
<key>LimitLoadToSessionType</key>
<array>
<string>Aqua</string>
<string>LoginWindow</string>
<string>Background</string>
<string>StandardIO</string>
<string>System</string>
</array>
</dict>
</plist>

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- launchd job that joins the tailnet at boot via 'tailscale up'.
     The Label must be unique per daemon: the sibling tailscaled plist in
     this directory already uses com.tailscale.tailscaled, and launchd
     will not load two jobs with the same Label, so this one is named
     com.tailscale.tailscale. -->
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscale</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscale</string>
<string>up</string>
<string>--ssh</string>
<string>--authkey</string>
<!-- NOTE(review): ${TAILSCALE_AUTHKEY} looks like a template placeholder;
     confirm it is substituted before this file is installed. -->
<string>${TAILSCALE_AUTHKEY}</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- launchd job that runs the tailscaled daemon at boot.
     NOTE(review): the sibling 'tailscale up' plist in this directory uses
     this same com.tailscale.tailscaled label; launchd labels must be unique,
     so confirm one of the two is renamed before both are installed. -->
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscaled</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

124
ci/darwin/scripts/boot-image.sh Executable file
View File

@@ -0,0 +1,124 @@
#!/bin/sh
# This script generates the boot commands for the macOS installer GUI.
# It is run on your local machine, not inside the VM.
# Each echoed line becomes one entry in packer's boot_command (see
# image-vanilla.pkr.hcl, which splits this script's output on newlines).
# NOTE(review): ${release}, ${username}, and ${password} are substituted by
# Packer's templatefile() before this script runs — they are template
# placeholders, not shell/environment variables.
# Sources:
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
echo "Script must be run with variables: release, username, and password" >&2
exit 1
fi
# The sequence below clicks through the macOS Setup Assistant screen by
# screen. The <waitNNs> prefixes give each screen time to appear; the order
# of screens (and therefore of these echoes) must not be changed.
# Hello, hola, bonjour, etc.
echo "<wait120s><spacebar>"
# Select Your Country and Region
echo "<wait30s>italiano<esc>english<enter>"
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
# Written and Spoken Languages
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Accessibility
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Data & Privacy
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Migration Assistant
echo "<wait30s><tab><tab><tab><spacebar>"
# Sign In with Your Apple ID
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you want to skip signing in with an Apple ID?
echo "<wait30s><tab><spacebar>"
# Terms and Conditions
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# I have read and agree to the macOS Software License Agreement
echo "<wait30s><tab><spacebar>"
# Create a Computer Account
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
# Enable Location Services
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you don't want to use Location Services?
echo "<wait30s><tab><spacebar>"
# Select Your Time Zone
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
# Analytics
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Screen Time
echo "<wait30s><tab><spacebar>"
# Siri
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
# Choose Your Look
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Post-setup navigation differs by release: 13/14 drive the UI with
# VoiceOver; 15 enables keyboard navigation via Terminal instead.
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
# Enable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
else
# Welcome to Mac
echo "<wait30s><spacebar>"
# Enable Keyboard navigation
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
echo "<wait30s><leftAltOn>q<leftAltOff>"
fi
# Now that the installation is done, open "System Settings"
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
# Navigate to "Sharing"
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
# The Sharing pane's tab order changed between releases, so the keystrokes
# to reach "Screen Sharing" and "Remote Login" are release-specific.
if [ "${release}" = "13" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><down><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
# Open "Remote Login" details
echo "<wait30s><tab><spacebar>"
# Enable "Full Disk Access"
echo "<wait30s><tab><spacebar>"
# Click "Done"
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Disable Voice Over
echo "<leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "14" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
# Disable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "15" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
fi
# Quit System Settings
echo "<wait30s><leftAltOn>q<leftAltOff>"

View File

@@ -0,0 +1,122 @@
#!/bin/sh
# This script optimizes macOS for virtualized environments.
# It disables things like spotlight, screen saver, and sleep.
# Sources:
# - https://github.com/sickcodes/osx-optimizer
# - https://github.com/koding88/MacBook-Optimization-Script
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents

# Nearly every command below writes system-wide preferences, so require root.
if [ "$(id -u)" != "0" ]; then
  echo "This script must be run using sudo." >&2
  exit 1
fi

# Print a command to stderr, run it, and abort the whole script on failure.
execute() {
  echo "$ $@" >&2
  if ! "$@"; then
    echo "Command failed: $@" >&2
    exit 1
  fi
}

# Turn off automatic checks, downloads, and installs of macOS/App Store updates.
disable_software_update() {
  execute softwareupdate --schedule off
  execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
  execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
  execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
  execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
  execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
  execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
  execute defaults write com.apple.commerce AutoUpdate -bool false
  execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
}

# Stop Spotlight indexing and erase any existing index.
disable_spotlight() {
  execute mdutil -i off -a
  execute mdutil -E /
}

disable_siri() {
  execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
  execute defaults write com.apple.Siri StatusMenuVisible -bool false
  execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
  execute defaults write com.apple.assistant.support "Assistant Enabled" 0
}

# Keep the VM awake: no system, display, or disk sleep.
disable_sleep() {
  execute systemsetup -setsleep Never
  execute systemsetup -setcomputersleep Never
  execute systemsetup -setdisplaysleep Never
  execute systemsetup -setharddisksleep Never
}

disable_screen_saver() {
  execute defaults write com.apple.screensaver loginWindowIdleTime 0
  execute defaults write com.apple.screensaver idleTime 0
}

disable_screen_lock() {
  execute defaults write com.apple.loginwindow DisableScreenLock -bool true
}

disable_wallpaper() {
  execute defaults write com.apple.loginwindow DesktopPicture ""
}

# Don't restore windows/apps from the previous session on login.
disable_application_state() {
  execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
}

# Reduce motion/transparency — fewer visual effects to render in a VM.
disable_accessibility() {
  execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
  execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
  execute defaults write com.apple.universalaccess reduceMotion -int 1
  execute defaults write com.apple.universalaccess reduceTransparency -int 1
}

disable_dashboard() {
  execute defaults write com.apple.dashboard mcx-disabled -boolean YES
  execute killall Dock
}

disable_animations() {
  execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
  execute defaults write -g QLPanelAnimationDuration -float 0
  execute defaults write com.apple.finder DisableAllAnimations -bool true
}

disable_time_machine() {
  execute tmutil disable
}

# Add serverperfmode=1 to boot-args unless it is already present.
# https://support.apple.com/en-us/101992
enable_performance_mode() {
  # Fix: the previous check wrapped 'grep -q' in '[ ... ]'. grep -q prints
  # nothing, so '[ ]' was always false and '! [ ]' always true — boot-args
  # were rewritten on every run. Test grep's exit status directly instead.
  if ! nvram boot-args 2>/dev/null | grep -q serverperfmode; then
    execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
  fi
}

# NOTE(review): this script runs under sudo, so '~' here is root's home
# directory — confirm the Terminal shortcut is intended for that Desktop.
add_terminal_to_desktop() {
  execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
}

main() {
  disable_software_update
  disable_spotlight
  disable_siri
  disable_sleep
  disable_screen_saver
  disable_screen_lock
  disable_wallpaper
  disable_application_state
  disable_accessibility
  disable_dashboard
  disable_animations
  disable_time_machine
  enable_performance_mode
  add_terminal_to_desktop
}

main

View File

@@ -0,0 +1,78 @@
#!/bin/sh
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
# Sources:
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword

# Must run as root: it writes /etc/kcpassword, sudoers, and system defaults.
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi

# Print a command to stderr, run it, and abort the script if it fails.
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}

# Encode a password in Apple's kcpassword format: hex-encode it, pad it,
# then XOR each byte against the fixed 11-byte "magic" key below, printing
# the result as a hex string.
# NOTE(review): this function is defined but never called — enable_auto_login
# below writes a hardcoded hex blob instead. Presumably that blob is the
# kcpassword encoding of the default password; confirm it matches the
# password actually passed to this script.
# NOTE(review): "${passwd_hex:$i:2}" substring expansion is a bashism; this
# works where /bin/sh is bash (macOS) but not under a strict POSIX shell.
kcpassword() {
passwd="$1"
# Apple's fixed XOR key for kcpassword files.
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
key_len=33
passwd_len=${#passwd_hex}
remainder=$((passwd_len % key_len))
# Pad the hex string so its length is a multiple of key_len.
if [ $remainder -ne 0 ]; then
padding=$((key_len - remainder))
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
fi
result=""
i=0
# XOR the password hex, two characters (one byte) at a time, cycling
# through the key bytes.
while [ $i -lt ${#passwd_hex} ]; do
for byte in $key; do
[ $i -ge ${#passwd_hex} ] && break
p="${passwd_hex:$i:2}"
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
result="${result}${r}"
i=$((i + 2))
done
done
echo "$result"
}

# Configure passwordless sudo, auto-login, and no screen lock for the given
# CI user account.
login() {
username="$1"
password="$2"
# Drop a sudoers fragment so the CI user never needs a password for sudo.
enable_passwordless_sudo() {
execute mkdir -p /etc/sudoers.d/
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
}
# Write /etc/kcpassword (fixed hex blob, see NOTE on kcpassword above) and
# mark the account as the auto-login user.
enable_auto_login() {
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
}
disable_screen_lock() {
execute sysadminctl -screenLock off -password "${password}"
}
enable_passwordless_sudo
enable_auto_login
disable_screen_lock
}

if [ $# -ne 2 ]; then
echo "Usage: $0 <username> <password>" >&2
exit 1
fi
login "$@"

View File

@@ -0,0 +1,78 @@
packer {
required_plugins {
# tart drives macOS VMs on Apple Silicon.
tart = {
version = ">= 1.12.0"
source = "github.com/cirruslabs/tart"
}
# external-raw renders scripts/boot-image.sh into boot_command keystrokes
# (used by image-vanilla.pkr.hcl).
external = {
version = ">= 0.0.2"
source = "github.com/joomcode/external"
}
}
}
# Major macOS release to build (13, 14, or 15); selects an entry from
# local.releases below.
variable "release" {
type = number
default = 13
}
# Credentials for the VM's admin account. These are baked into throwaway CI
# images; they are not secrets.
variable "username" {
type = string
default = "admin"
}
variable "password" {
type = string
default = "admin"
}
variable "cpu_count" {
type = number
default = 2
}
variable "memory_gb" {
type = number
default = 4
}
variable "disk_size_gb" {
type = number
default = 50
}
locals {
# Known macOS releases and their restore images (IPSW URLs on Apple's CDN;
# see the README for how to find one via ipsw.me).
sequoia = {
tier = 1
distro = "sequoia"
release = "15"
ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
}
sonoma = {
tier = 2
distro = "sonoma"
release = "14"
ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
}
ventura = {
tier = 2
distro = "ventura"
release = "13"
ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
}
# Map the numeric 'release' variable to its release descriptor.
releases = {
15 = local.sequoia
14 = local.sonoma
13 = local.ventura
}
release = local.releases[var.release]
username = var.username
password = var.password
cpu_count = var.cpu_count
memory_gb = var.memory_gb
disk_size_gb = var.disk_size_gb
}

18
ci/linux/Dockerfile Normal file
View File

@@ -0,0 +1,18 @@
# CI build image for Bun on Linux.
# Base image is overridable: docker build --build-arg IMAGE=<distro:tag> ...
ARG IMAGE=debian:11
FROM $IMAGE
# Install the build toolchain via the repo's bootstrap script, then clean up.
COPY ./scripts/bootstrap.sh /tmp/bootstrap.sh
ENV CI=true
RUN sh /tmp/bootstrap.sh && rm -rf /tmp/*
WORKDIR /workspace/bun
# Copy the sources and metadata the build needs.
COPY bunfig.toml bunfig.toml
COPY package.json package.json
COPY CMakeLists.txt CMakeLists.txt
COPY cmake/ cmake/
COPY scripts/ scripts/
COPY patches/ patches/
COPY *.zig ./
COPY src/ src/
COPY packages/ packages/
COPY test/ test/
# Install JS dependencies and run the CI build.
RUN bun i
RUN bun run build:ci

View File

@@ -0,0 +1,27 @@
#!/bin/sh
# This script sets the hostname of the current machine.

# Print a command to stderr, run it, and abort the script if it fails.
execute() {
  echo "$ $@" >&2
  if ! "$@"; then
    echo "Command failed: $@" >&2
    exit 1
  fi
}

main() {
  if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <hostname>" >&2
    exit 1
  fi
  # 'command -v' is the POSIX-specified way to test for a tool. The previous
  # '[ -f "$(which hostnamectl)" ]' was fragile: 'which' is not guaranteed to
  # exist, and a symlinked or builtin result can defeat the -f test.
  if command -v hostnamectl >/dev/null 2>&1; then
    execute hostnamectl set-hostname "$1"
  else
    echo "Error: hostnamectl is not installed." >&2
    exit 1
  fi
}

main "$@"

View File

@@ -0,0 +1,22 @@
#!/bin/sh
# This script starts tailscale on the current machine.

# Log the command to stderr, run it, and bail out of the script if it fails.
run_logged() {
  echo "$ $@" >&2
  "$@" || {
    echo "Command failed: $@" >&2
    exit 1
  }
}

main() {
  [ "$#" -eq 1 ] || {
    echo "Usage: $0 <auth-key>" >&2
    exit 1
  }
  # --reset clears prior settings; --accept-risk=lose-ssh acknowledges that
  # tailscale ssh takes over; the auth key is the single positional argument.
  run_logged tailscale up --reset --ssh --accept-risk=lose-ssh --auth-key="$1"
}

main "$@"

27
ci/package.json Normal file
View File

@@ -0,0 +1,27 @@
{
"private": true,
"scripts": {
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
"login": "token=$(gh auth token); username=$(gh api user --jq .login); echo \"Login as $username...\"; echo \"$token\" | tart login ghcr.io --username \"$username\" --password-stdin; echo \"$token\" | docker login ghcr.io --username \"$username\" --password-stdin",
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
"fetch:darwin-version": "echo 1",
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
}
}

View File

@@ -1,16 +1,16 @@
get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME)
message(STATUS "Running script: ${SCRIPT_NAME}")
if(NOT ZIG_PATH OR NOT ZIG_COMMIT)
message(FATAL_ERROR "ZIG_PATH and ZIG_COMMIT required")
if(NOT ZIG_PATH OR NOT ZIG_COMMIT OR NOT ZIG_VERSION)
message(FATAL_ERROR "ZIG_PATH, ZIG_COMMIT, and ZIG_VERSION are required")
endif()
if(CMAKE_HOST_APPLE)
set(ZIG_OS_ABI "macos-none")
set(ZIG_OS "macos")
elseif(CMAKE_HOST_WIN32)
set(ZIG_OS_ABI "windows-gnu")
set(ZIG_OS "windows")
elseif(CMAKE_HOST_UNIX)
set(ZIG_OS_ABI "linux-musl")
set(ZIG_OS "linux")
else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_HOST_SYSTEM_NAME}")
endif()
@@ -28,16 +28,22 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_HOST_SYSTEM_PROCESSOR}")
endif()
set(ZIG_NAME bootstrap-${ZIG_ARCH}-${ZIG_OS_ABI})
set(ZIG_FILENAME ${ZIG_NAME}.zip)
set(ZIG_ASAN "")
if(ENABLE_ASAN)
set(ZIG_ASAN "+asan")
endif()
set(ZIG_NAME zig-${ZIG_OS}-${ZIG_ARCH}-${ZIG_VERSION}${ZIG_ASAN})
if(CMAKE_HOST_WIN32)
set(ZIG_EXE "zig.exe")
set(ZIG_FILENAME ${ZIG_NAME}.zip)
else()
set(ZIG_EXE "zig")
set(ZIG_FILENAME ${ZIG_NAME}.tar.xz)
endif()
set(ZIG_DOWNLOAD_URL https://github.com/oven-sh/zig/releases/download/autobuild-${ZIG_COMMIT}/${ZIG_FILENAME})
set(ZIG_DOWNLOAD_URL https://bun-ci-assets.bun.sh/${ZIG_FILENAME})
execute_process(
COMMAND
@@ -61,8 +67,35 @@ if(NOT EXISTS ${ZIG_PATH}/${ZIG_EXE})
endif()
# Tools like VSCode need a stable path to the zig executable, on both Unix and Windows
# To workaround this, we create a `zig.exe` & `zls.exe` symlink on Unix.
# To workaround this, we create a `bun.exe` symlink on Unix.
if(NOT WIN32)
file(CREATE_LINK ${ZIG_PATH}/${ZIG_EXE} ${ZIG_PATH}/zig.exe SYMBOLIC)
file(CREATE_LINK ${ZIG_PATH}/zls ${ZIG_PATH}/zls.exe SYMBOLIC)
endif()
set(ZIG_REPOSITORY_PATH ${ZIG_PATH}/repository)
execute_process(
COMMAND
${CMAKE_COMMAND}
-DGIT_PATH=${ZIG_REPOSITORY_PATH}
-DGIT_REPOSITORY=oven-sh/zig
-DGIT_COMMIT=${ZIG_COMMIT}
-P ${CMAKE_CURRENT_LIST_DIR}/GitClone.cmake
ERROR_STRIP_TRAILING_WHITESPACE
ERROR_VARIABLE
ZIG_REPOSITORY_ERROR
RESULT_VARIABLE
ZIG_REPOSITORY_RESULT
)
if(NOT ZIG_REPOSITORY_RESULT EQUAL 0)
message(FATAL_ERROR "Download failed: ${ZIG_REPOSITORY_ERROR}")
endif()
file(REMOVE_RECURSE ${ZIG_PATH}/lib)
# Use copy_directory instead of file(RENAME) because there were
# race conditions in CI where some files were not copied.
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_directory ${ZIG_REPOSITORY_PATH}/lib ${ZIG_PATH}/lib)
file(REMOVE_RECURSE ${ZIG_REPOSITORY_PATH})

View File

@@ -432,7 +432,6 @@ set(BUN_OBJECT_LUT_SOURCES
${CWD}/src/bun.js/bindings/BunProcess.cpp
${CWD}/src/bun.js/bindings/ProcessBindingBuffer.cpp
${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
${CWD}/src/bun.js/modules/NodeModuleModule.cpp
${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
@@ -445,7 +444,6 @@ set(BUN_OBJECT_LUT_OUTPUTS
${CODEGEN_PATH}/BunProcess.lut.h
${CODEGEN_PATH}/ProcessBindingBuffer.lut.h
${CODEGEN_PATH}/ProcessBindingConstants.lut.h
${CODEGEN_PATH}/ProcessBindingFs.lut.h
${CODEGEN_PATH}/ProcessBindingNatives.lut.h
${CODEGEN_PATH}/NodeModuleModule.lut.h
${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
@@ -622,7 +620,6 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/bun.js/bindings/sqlite/*.cpp
${CWD}/src/bun.js/bindings/webcrypto/*.cpp
${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp
${CWD}/src/bun.js/bindings/node/crypto/*.cpp
${CWD}/src/bun.js/bindings/v8/*.cpp
${CWD}/src/bun.js/bindings/v8/shim/*.cpp
${CWD}/src/bake/*.cpp
@@ -760,7 +757,6 @@ target_include_directories(${bun} PRIVATE
${CWD}/src/bun.js/bindings
${CWD}/src/bun.js/bindings/webcore
${CWD}/src/bun.js/bindings/webcrypto
${CWD}/src/bun.js/bindings/node/crypto
${CWD}/src/bun.js/bindings/sqlite
${CWD}/src/bun.js/bindings/v8
${CWD}/src/bun.js/modules

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
litespeedtech/ls-hpack
COMMIT
8905c024b6d052f083a3d11d0a169b3c2735c8a1
32e96f10593c7cb8553cd8c9c12721100ae9e924
)
if(WIN32)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
7a4d7b8d18f8159a808aade63eb93ea6abd06924
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
)
set(MIMALLOC_CMAKE_ARGS
@@ -31,7 +31,13 @@ if(ENABLE_VALGRIND)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()
if(DEBUG)
if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
else()
set(MIMALLOC_LIBRARY mimalloc-static)
endif()
elseif(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-debug)
else()
set(MIMALLOC_LIBRARY mimalloc)

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION f14adff4c39ba08174b84a942f9584028fc9a7ae)
set(WEBKIT_VERSION 57004f91903936881b3301594d9d67708f1ff64c)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

View File

@@ -20,7 +20,8 @@ else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
set(ZIG_COMMIT "bb9d6ab2c0bbbf20cc24dad03e88f3b3ffdb7de7")
optionx(ZIG_VERSION STRING "The zig version of the compiler to download" DEFAULT "0.14.0-dev.2987+183bb8b08")
optionx(ZIG_COMMIT STRING "The zig commit to use in oven-sh/zig" DEFAULT "02c57c7ee3b8fde7528c74dd06490834d2d6fae9")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -76,6 +77,7 @@ register_command(
COMMAND
${CMAKE_COMMAND}
-DZIG_PATH=${ZIG_PATH}
-DZIG_VERSION=${ZIG_VERSION}
-DZIG_COMMIT=${ZIG_COMMIT}
-DENABLE_ASAN=${ENABLE_ASAN}
-P ${CWD}/cmake/scripts/DownloadZig.cmake

View File

@@ -1,4 +1,4 @@
FROM debian:bookworm-slim AS build
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest

View File

@@ -1,4 +1,4 @@
FROM debian:bookworm-slim AS build
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest

View File

@@ -1,4 +1,4 @@
FROM debian:bookworm-slim AS build
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest

View File

@@ -1,7 +1,3 @@
{% callout %}
**⚠️ Warning** — `bun:ffi` is **experimental**, with known bugs and limitations, and should not be relied on in production. The most stable way to interact with native code from Bun is to write a [Node-API module](/docs/api/node-api).
{% /callout %}
Use the built-in `bun:ffi` module to efficiently call native libraries from JavaScript. It works with languages that support the C ABI (Zig, Rust, C/C++, C#, Nim, Kotlin, etc).
## dlopen usage (`bun:ffi`)
@@ -302,11 +298,7 @@ setTimeout(() => {
When you're done with a JSCallback, you should call `close()` to free the memory.
### Experimental thread-safe callbacks
`JSCallback` has experimental support for thread-safe callbacks. This will be needed if you pass a callback function into a different thread from its instantiation context. You can enable it with the optional `threadsafe` parameter.
Currently, thread-safe callbacks work best when run from another thread that is running JavaScript code, i.e. a [`Worker`](/docs/api/workers). A future version of Bun will enable them to be called from any thread (such as new threads spawned by your native library that Bun is not aware of).
`JSCallback` has experimental support for thread-safe callbacks. This will be needed if you pass a callback function into a different thread from its instantiation context. You can enable it with the optional `threadsafe` option flag.
```ts
const searchIterator = new JSCallback(
(ptr, length) => /hello/.test(new CString(ptr, length)),
@@ -317,6 +309,7 @@ const searchIterator = new JSCallback(
},
);
```
Be aware that there are still cases where this does not 100% work.
{% callout %}

View File

@@ -8,421 +8,19 @@ To start a high-performance HTTP server with a clean API, the recommended approa
## `Bun.serve()`
Use `Bun.serve` to start an HTTP server in Bun.
Start an HTTP server in Bun with `Bun.serve`.
```ts
Bun.serve({
// `routes` requires Bun v1.2.3+
routes: {
// Static routes
"/api/status": new Response("OK"),
// Dynamic routes
"/users/:id": req => {
return new Response(`Hello User ${req.params.id}!`);
},
// Per-HTTP method handlers
"/api/posts": {
GET: () => new Response("List posts"),
POST: async req => {
const body = await req.json();
return Response.json({ created: true, ...body });
},
},
// Wildcard route for all routes that start with "/api/" and aren't otherwise matched
"/api/*": Response.json({ message: "Not found" }, { status: 404 }),
// Redirect from /blog/hello to /blog/hello/world
"/blog/hello": Response.redirect("/blog/hello/world"),
// Serve a file by buffering it in memory
"/favicon.ico": new Response(await Bun.file("./favicon.ico").bytes(), {
headers: {
"Content-Type": "image/x-icon",
},
}),
},
// (optional) fallback for unmatched routes:
// Required if Bun's version < 1.2.3
fetch(req) {
return new Response("Not Found", { status: 404 });
return new Response("Bun!");
},
});
```
### Routing
Routes in `Bun.serve()` receive a `BunRequest` (which extends [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request)) and return a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) or `Promise<Response>`. This makes it easier to use the same code for both sending & receiving HTTP requests.
```ts
// Simplified for brevity
interface BunRequest<T extends string> extends Request {
params: Record<T, string>;
}
```
#### Async/await in routes
You can use async/await in route handlers to return a `Promise<Response>`.
```ts
import { sql, serve } from "bun";
serve({
port: 3001,
routes: {
"/api/version": async () => {
const [version] = await sql`SELECT version()`;
return Response.json(version);
},
},
});
```
#### Promise in routes
You can also return a `Promise<Response>` from a route handler.
```ts
import { sql, serve } from "bun";
serve({
routes: {
"/api/version": () => {
return new Promise(resolve => {
setTimeout(async () => {
const [version] = await sql`SELECT version()`;
resolve(Response.json(version));
}, 100);
});
},
},
});
```
#### Type-safe route parameters
TypeScript parses route parameters when passed as a string literal, so that your editor will show autocomplete when accessing `request.params`.
```ts
import type { BunRequest } from "bun";
Bun.serve({
routes: {
// TypeScript knows the shape of params when passed as a string literal
"/orgs/:orgId/repos/:repoId": req => {
const { orgId, repoId } = req.params;
return Response.json({ orgId, repoId });
},
"/orgs/:orgId/repos/:repoId/settings": (
// optional: you can explicitly pass a type to BunRequest:
req: BunRequest<"/orgs/:orgId/repos/:repoId/settings">,
) => {
const { orgId, repoId } = req.params;
return Response.json({ orgId, repoId });
},
},
});
```
Percent-encoded route parameter values are automatically decoded. Unicode characters are supported. Invalid unicode is replaced with the unicode replacement character `U+FFFD`.
### Static responses
Routes can also be `Response` objects (without the handler function). Bun.serve() optimizes it for zero-allocation dispatch - perfect for health checks, redirects, and fixed content:
```ts
Bun.serve({
routes: {
// Health checks
"/health": new Response("OK"),
"/ready": new Response("Ready", {
headers: {
// Pass custom headers
"X-Ready": "1",
},
}),
// Redirects
"/blog": Response.redirect("https://bun.sh/blog"),
// API responses
"/api/config": Response.json({
version: "1.0.0",
env: "production",
}),
},
});
```
Static responses do not allocate additional memory after initialization. You can generally expect at least a 15% performance improvement over manually returning a `Response` object.
Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.
```ts
const server = Bun.serve({
static: {
"/api/time": new Response(new Date().toISOString()),
},
fetch(req) {
return new Response("404!");
},
});
// Update the time every second.
setInterval(() => {
server.reload({
static: {
"/api/time": new Response(new Date().toISOString()),
},
fetch(req) {
return new Response("404!");
},
});
}, 1000);
```
Reloading routes only impacts the next request. In-flight requests continue to use the old routes. After in-flight requests to old routes are finished, the old routes are freed from memory.
To simplify error handling, static routes do not support streaming response bodies from `ReadableStream` or an `AsyncIterator`. Fortunately, you can still buffer the response in memory first:
```ts
const time = await fetch("https://api.example.com/v1/data");
// Buffer the response in memory first.
const blob = await time.blob();
const server = Bun.serve({
static: {
"/api/data": new Response(blob),
},
fetch(req) {
return new Response("404!");
},
});
```
### Route precedence
Routes are matched in order of specificity:
1. Exact routes (`/users/all`)
2. Parameter routes (`/users/:id`)
3. Wildcard routes (`/users/*`)
4. Global catch-all (`/*`)
```ts
Bun.serve({
routes: {
// Most specific first
"/api/users/me": () => new Response("Current user"),
"/api/users/:id": req => new Response(`User ${req.params.id}`),
"/api/*": () => new Response("API catch-all"),
"/*": () => new Response("Global catch-all"),
},
});
```
### Per-HTTP Method Routes
Route handlers can be specialized by HTTP method:
```ts
Bun.serve({
routes: {
"/api/posts": {
// Different handlers per method
GET: () => new Response("List posts"),
POST: async req => {
const post = await req.json();
return Response.json({ id: crypto.randomUUID(), ...post });
},
PUT: async req => {
const updates = await req.json();
return Response.json({ updated: true, ...updates });
},
DELETE: () => new Response(null, { status: 204 }),
},
},
});
```
You can pass any of the following methods:
| Method | Usecase example |
| --------- | ------------------------------- |
| `GET` | Fetch a resource |
| `HEAD` | Check if a resource exists |
| `OPTIONS` | Get allowed HTTP methods (CORS) |
| `DELETE` | Delete a resource |
| `PATCH` | Update a resource |
| `POST` | Create a resource |
| `PUT` | Update a resource |
When passing a function instead of an object, all methods will be handled by that function:
```ts
const server = Bun.serve({
routes: {
"/api/version": () => Response.json({ version: "1.0.0" }),
},
});
await fetch(new URL("/api/version", server.url));
await fetch(new URL("/api/version", server.url), { method: "PUT" });
// ... etc
```
### Hot Route Reloading
Update routes without server restarts using `server.reload()`:
```ts
const server = Bun.serve({
routes: {
"/api/version": () => Response.json({ version: "1.0.0" }),
},
});
// Deploy new routes without downtime
server.reload({
routes: {
"/api/version": () => Response.json({ version: "2.0.0" }),
},
});
```
### Error Handling
Bun provides structured error handling for routes:
```ts
Bun.serve({
routes: {
// Errors are caught automatically
"/api/risky": () => {
throw new Error("Something went wrong");
},
},
// Global error handler
error(error) {
console.error(error);
return new Response(`Internal Error: ${error.message}`, {
status: 500,
headers: {
"Content-Type": "text/plain",
},
});
},
});
```
### HTML imports
To add a client-side single-page app, you can use an HTML import:
```ts
import myReactSinglePageApp from "./index.html";
Bun.serve({
routes: {
"/": myReactSinglePageApp,
},
});
```
HTML imports don't just serve HTML — they give you a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler and CSS parser.
You can use this to build a full-featured frontend with React, TypeScript, Tailwind CSS, and more. Check out [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack) to learn more.
### Practical example: REST API
Here's a basic database-backed REST API using Bun's router with zero dependencies:
{% codetabs %}
```ts#server.ts
import type { Post } from "./types.ts";
import { Database } from "bun:sqlite";
const db = new Database("posts.db");
db.exec(`
CREATE TABLE IF NOT EXISTS posts (
id TEXT PRIMARY KEY,
title TEXT NOT NULL,
content TEXT NOT NULL,
created_at TEXT NOT NULL
)
`);
Bun.serve({
routes: {
// List posts
"/api/posts": {
GET: () => {
const posts = db.query("SELECT * FROM posts").all();
return Response.json(posts);
},
// Create post
POST: async req => {
const post: Omit<Post, "id" | "created_at"> = await req.json();
const id = crypto.randomUUID();
db.query(
`INSERT INTO posts (id, title, content, created_at)
VALUES (?, ?, ?, ?)`,
).run(id, post.title, post.content, new Date().toISOString());
return Response.json({ id, ...post }, { status: 201 });
},
},
// Get post by ID
"/api/posts/:id": req => {
const post = db
.query("SELECT * FROM posts WHERE id = ?")
.get(req.params.id);
if (!post) {
return new Response("Not Found", { status: 404 });
}
return Response.json(post);
},
},
error(error) {
console.error(error);
return new Response("Internal Server Error", { status: 500 });
},
});
```
```ts#types.ts
export interface Post {
id: string;
title: string;
content: string;
created_at: string;
}
```
{% /codetabs %}
### Routing performance
`Bun.serve()`'s router builds on top of uWebSocket's [tree-based approach](https://github.com/oven-sh/bun/blob/0d1a00fa0f7830f8ecd99c027fce8096c9d459b6/packages/bun-uws/src/HttpRouter.h#L57-L64) to add [SIMD-accelerated route parameter decoding](https://github.com/oven-sh/bun/blob/main/src/bun.js/bindings/decodeURIComponentSIMD.cpp#L21-L271) and [JavaScriptCore structure caching](https://github.com/oven-sh/bun/blob/main/src/bun.js/bindings/ServerRouteList.cpp#L100-L101) to push the performance limits of what modern hardware allows.
### `fetch` request handler
The `fetch` handler handles incoming requests that weren't matched by any route. It receives a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object and returns a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) or [`Promise<Response>`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
The `fetch` handler handles incoming requests. It receives a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object and returns a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) or `Promise<Response>`.
```ts
Bun.serve({
@@ -472,6 +70,116 @@ const server = Bun.serve({
});
```
### Static routes
Use the `static` option to serve static `Response` objects by route.
```ts
// Bun v1.1.27+ required
Bun.serve({
static: {
// health-check endpoint
"/api/health-check": new Response("All good!"),
// redirect from /old-link to /new-link
"/old-link": Response.redirect("/new-link", 301),
// serve static text
"/": new Response("Hello World"),
// serve a file by buffering it in memory
"/index.html": new Response(await Bun.file("./index.html").bytes(), {
headers: {
"Content-Type": "text/html",
},
}),
"/favicon.ico": new Response(await Bun.file("./favicon.ico").bytes(), {
headers: {
"Content-Type": "image/x-icon",
},
}),
// serve JSON
"/api/version.json": Response.json({ version: "1.0.0" }),
},
fetch(req) {
return new Response("404!");
},
});
```
Static routes support headers, status code, and other `Response` options.
```ts
Bun.serve({
static: {
"/api/time": new Response(new Date().toISOString(), {
headers: {
"X-Custom-Header": "Bun!",
},
}),
},
fetch(req) {
return new Response("404!");
},
});
```
Static routes can serve Response bodies faster than `fetch` handlers because they don't create `Request` objects, they don't create `AbortSignal`, they don't create additional `Response` objects. The only per-request memory allocation is the TCP/TLS socket data needed for each request.
{% note %}
`static` is experimental
{% /note %}
Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.
```ts
const server = Bun.serve({
static: {
"/api/time": new Response(new Date().toISOString()),
},
fetch(req) {
return new Response("404!");
},
});
// Update the time every second.
setInterval(() => {
server.reload({
static: {
"/api/time": new Response(new Date().toISOString()),
},
fetch(req) {
return new Response("404!");
},
});
}, 1000);
```
Reloading static routes only impacts the next request. In-flight requests continue to use the old static routes. After in-flight requests to old static routes are finished, the old static routes are freed from memory.
To simplify error handling, static routes do not support streaming response bodies from `ReadableStream` or an `AsyncIterator`. Fortunately, you can still buffer the response in memory first:
```ts
const time = await fetch("https://api.example.com/v1/data");
// Buffer the response in memory first.
const blob = await time.blob();
const server = Bun.serve({
static: {
"/api/data": new Response(blob),
},
fetch(req) {
return new Response("404!");
},
});
```
### Changing the `port` and `hostname`
To configure which port and hostname the server will listen on, set `port` and `hostname` in the options object.
@@ -845,7 +553,7 @@ Update the server's handlers without restarting:
```ts
const server = Bun.serve({
routes: {
static: {
"/api/version": Response.json({ version: "v1" }),
},
fetch(req) {
@@ -855,7 +563,7 @@ const server = Bun.serve({
// Update to new handler
server.reload({
routes: {
static: {
"/api/version": Response.json({ version: "v2" }),
},
fetch(req) {
@@ -864,7 +572,7 @@ server.reload({
});
```
This is useful for development and hot reloading. Only `fetch`, `error`, and `routes` can be updated.
This is useful for development and hot reloading. Only `fetch`, `error`, and `static` handlers can be updated.
## Per-Request Controls

View File

@@ -181,8 +181,8 @@ const download = s3.presign("my-file.txt"); // GET, text/plain, expires in 24 ho
const upload = s3.presign("my-file", {
expiresIn: 3600, // 1 hour
method: "PUT",
type: "application/json", // No extension for inferring, so we can specify the content type to be JSON
method: 'PUT',
type: 'application/json', // No extension for inferring, so we can specify the content type to be JSON
});
// You can call .presign() if on a file reference, but avoid doing so
@@ -361,56 +361,6 @@ const minio = new S3Client({
});
```
### Using Bun's S3Client with supabase
To use Bun's S3 client with [supabase](https://supabase.com/), set `endpoint` to the supabase endpoint in the `S3Client` constructor. The supabase endpoint includes your account ID and the /storage/v1/s3 path. Make sure to turn on "Enable connection via S3 protocol" in the supabase dashboard at https://supabase.com/dashboard/project/<account-id>/settings/storage, and set the region shown in the same section.
```ts
import { S3Client } from "bun";
const supabase = new S3Client({
accessKeyId: "access-key",
secretAccessKey: "secret-key",
bucket: "my-bucket",
region: "us-west-1",
endpoint: "https://<account-id>.supabase.co/storage/v1/s3/storage",
});
```
### Using Bun's S3Client with S3 Virtual Hosted-Style endpoints
When using an S3 Virtual Hosted-Style endpoint, you need to set the `virtualHostedStyle` option to `true`. If no endpoint is provided, Bun will use the region and bucket to infer the AWS S3 endpoint; if no region is provided, it will use `us-east-1`. If you provide the endpoint, there is no need to provide the bucket name.
```ts
import { S3Client } from "bun";
// AWS S3 endpoint inferred from region and bucket
const s3 = new S3Client({
accessKeyId: "access-key",
secretAccessKey: "secret-key",
bucket: "my-bucket",
virtualHostedStyle: true,
// endpoint: "https://my-bucket.s3.us-east-1.amazonaws.com",
// region: "us-east-1",
});
// AWS S3
const s3WithEndpoint = new S3Client({
accessKeyId: "access-key",
secretAccessKey: "secret-key",
endpoint: "https://<bucket-name>.s3.<region>.amazonaws.com",
virtualHostedStyle: true,
});
// Cloudflare R2
const r2WithEndpoint = new S3Client({
accessKeyId: "access-key",
secretAccessKey: "secret-key",
endpoint: "https://<bucket-name>.<account-id>.r2.cloudflarestorage.com",
virtualHostedStyle: true,
});
```
## Credentials
Credentials are one of the hardest parts of using S3, and we've tried to make it as easy as possible. By default, Bun reads the following environment variables for credentials.

View File

@@ -87,7 +87,7 @@ await sql`INSERT INTO users ${sql(users)}`;
### Picking columns to insert
You can use `sql(object, ...string)` to pick which columns to insert. Each of the columns must be defined on the object.
You can use `sql(object, Array<string>)` to pick which columns to insert. Each of the columns must be defined on the object.
```ts
const user = {
@@ -96,7 +96,7 @@ const user = {
age: 25,
};
await sql`INSERT INTO users ${sql(user, "name", "email")}`;
await sql`INSERT INTO users ${sql(user, ["name", "email"])}`;
// Only inserts name and email columns, ignoring other fields
```
@@ -165,72 +165,13 @@ await sql`
`;
```
### Dynamic columns in updates
You can use `sql(object, ...string)` to pick which columns to update. Each of the columns must be defined on the object. If no columns are specified, all keys of the object will be used to update the row.
```ts
await sql`UPDATE users SET ${sql(user, "name", "email")} WHERE id = ${user.id}`;
// uses all keys from the object to update the row
await sql`UPDATE users SET ${sql(user)} WHERE id = ${user.id}`;
```
### Dynamic values and `where in`
Value lists can also be created dynamically, making where in queries simple too. Optionally, you can pass an array of objects and specify which key to use to create the list.
```ts
await sql`SELECT * FROM users WHERE id IN ${sql([1, 2, 3])}`;
const users = [
{ id: 1, name: "Alice" },
{ id: 2, name: "Bob" },
{ id: 3, name: "Charlie" },
];
await sql`SELECT * FROM users WHERE id IN ${sql(users, "id")}`;
```
## `sql``.simple()`
The PostgreSQL wire protocol supports two types of queries: "simple" and "extended". Simple queries can contain multiple statements but don't support parameters, while extended queries (the default) support parameters but only allow one statement.
To run multiple statements in a single query, use `sql``.simple()`:
```ts
// Multiple statements in one query
await sql`
SELECT 1;
SELECT 2;
`.simple();
```
Simple queries are often useful for database migrations and setup scripts.
Note that simple queries cannot use parameters (`${value}`). If you need parameters, you must split your query into separate statements.
### Queries in files
You can use the `sql.file` method to read a query from a file and execute it. If the file includes $1, $2, etc., you can pass parameters to the query. If no parameters are used, it can execute multiple commands per file.
```ts
const result = await sql.file("query.sql", [1, 2, 3]);
```
### Unsafe Queries
You can use the `sql.unsafe` function to execute raw SQL strings. Use this with caution, as it will not escape user input. Executing more than one command per query is allowed if no parameters are used.
You can use the `sql.unsafe` function to execute raw SQL strings. Use this with caution, as it will not escape user input.
```ts
// Multiple commands without parameters
const result = await sql.unsafe(`
SELECT ${userColumns} FROM users;
SELECT ${accountColumns} FROM accounts;
`);
// Using parameters (only one command is allowed)
const result = await sql.unsafe(
"SELECT " + dangerous + " FROM users WHERE id = $1",
[id],
"SELECT " + columns + " FROM users WHERE id = " + id,
);
```
@@ -321,21 +262,6 @@ const db = new SQL({
});
```
## Dynamic passwords
When clients need to use alternative authentication schemes such as access tokens or connections to databases with rotating passwords, provide either a synchronous or asynchronous function that will resolve the dynamic password value at connection time.
```ts
import { SQL } from "bun";
const sql = new SQL(url, {
// Other connection config
...
// Password function for the database user
password: async () => await signer.getAuthToken(),
});
```
## Transactions
To start a new transaction, use `sql.begin`. This method reserves a dedicated connection for the duration of the transaction and provides a scoped `sql` instance to use within the callback function. Once the callback completes, `sql.begin` resolves with the return value of the callback.
@@ -505,34 +431,6 @@ try {
} // Automatically released
```
## Prepared Statements
By default, Bun's SQL client automatically creates named prepared statements for queries where it can be inferred that the query is static. This provides better performance. However, you can change this behavior by setting `prepare: false` in the connection options:
```ts
const sql = new SQL({
// ... other options ...
prepare: false, // Disable persisting named prepared statements on the server
});
```
When `prepare: false` is set:
Queries are still executed using the "extended" protocol, but they are executed using [unnamed prepared statements](https://www.postgresql.org/docs/current/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY). An unnamed prepared statement lasts only until the next Parse statement specifying the unnamed statement as destination is issued.
- Parameter binding is still safe against SQL injection
- Each query is parsed and planned from scratch by the server
- Queries will not be [pipelined](https://www.postgresql.org/docs/current/protocol-flow.html#PROTOCOL-FLOW-PIPELINING)
You might want to use `prepare: false` when:
- Using PGBouncer in transaction mode (though since PGBouncer 1.21.0, protocol-level named prepared statements are supported when configured properly)
- Debugging query execution plans
- Working with dynamic SQL where query plans need to be regenerated frequently
- More than one command per query will not be supported (unless you use `sql``.simple()`)
Note that disabling prepared statements may impact performance for queries that are executed frequently with different parameters, as the server needs to parse and plan each query from scratch.
## Error Handling
The client provides typed errors for different failure scenarios:
@@ -568,7 +466,6 @@ The client provides typed errors for different failure scenarios:
| `ERR_POSTGRES_SERVER_ERROR` | General error from PostgreSQL server |
| `ERR_POSTGRES_INVALID_QUERY_BINDING` | Invalid parameter binding |
| `ERR_POSTGRES_QUERY_CANCELLED` | Query was cancelled |
| `ERR_POSTGRES_NOT_TAGGED_CALL` | Query was called without a tagged call |
### Data Type Errors
@@ -604,7 +501,7 @@ The client provides typed errors for different failure scenarios:
## Numbers and BigInt
Bun's SQL client includes special handling for large numbers that exceed the range of a 53-bit integer. Here's how it works:
Bun's SQL client includes special handling for large numbers that exceed the range of a 53-bit integer. Here's how it works:
```ts
import { sql } from "bun";

View File

@@ -228,17 +228,3 @@ const worker = new Worker("./i-am-smol.ts", {
{% details summary="What does `smol` mode actually do?" %}
Setting `smol: true` sets `JSC::HeapSize` to be `Small` instead of the default `Large`.
{% /details %}
## `Bun.isMainThread`
You can check if you're in the main thread by checking `Bun.isMainThread`.
```ts
if (Bun.isMainThread) {
console.log("I'm the main thread");
} else {
console.log("I'm in a worker");
}
```
This is useful for conditionally running code based on whether you're in the main thread or not.

View File

@@ -75,16 +75,14 @@ bun build --compile --target=bun-darwin-x64 ./path/to/my/app.ts --outfile myapp
The order of the `--target` flag does not matter, as long as they're delimited by a `-`.
| --target | Operating System | Architecture | Modern | Baseline | Libc |
| --------------------- | ---------------- | ------------ | ------ | -------- | ----- |
| bun-linux-x64 | Linux | x64 | ✅ | ✅ | glibc |
| bun-linux-arm64 | Linux | arm64 | ✅ | N/A | glibc |
| bun-windows-x64 | Windows | x64 | ✅ | ✅ | - |
| ~~bun-windows-arm64~~ | Windows | arm64 | ❌ | ❌ | - |
| bun-darwin-x64 | macOS | x64 | ✅ | ✅ | - |
| bun-darwin-arm64 | macOS | arm64 | ✅ | N/A | - |
| bun-linux-x64-musl | Linux | x64 | ✅ | ✅ | musl |
| bun-linux-arm64-musl | Linux | arm64 | ✅ | N/A | musl |
| --target | Operating System | Architecture | Modern | Baseline |
| --------------------- | ---------------- | ------------ | ------ | -------- |
| bun-linux-x64 | Linux | x64 | ✅ | ✅ |
| bun-linux-arm64 | Linux | arm64 | ✅ | N/A |
| bun-windows-x64 | Windows | x64 | ✅ | ✅ |
| ~~bun-windows-arm64~~ | Windows | arm64 | ❌ | ❌ |
| bun-darwin-x64 | macOS | x64 | ✅ | ✅ |
| bun-darwin-arm64 | macOS | arm64 | ✅ | N/A |
On x64 platforms, Bun uses SIMD optimizations which require a modern CPU supporting AVX2 instructions. The `-baseline` build of Bun is for older CPUs that don't support these optimizations. Normally, when you install Bun we automatically detect which version to use but this can be harder to do when cross-compiling since you might not know the target CPU. You usually don't need to worry about it on Darwin x64, but it is relevant for Windows x64 and Linux x64. If you or your users see `"Illegal instruction"` errors, you might need to use the baseline version.
@@ -296,55 +294,6 @@ These flags currently cannot be used when cross-compiling because they depend on
{% /callout %}
## Code signing on macOS
To codesign a standalone executable on macOS (which fixes Gatekeeper warnings), use the `codesign` command.
```sh
$ codesign --deep --force -vvvv --sign "XXXXXXXXXX" ./myapp
```
We recommend including an `entitlements.plist` file with JIT permissions.
```xml#entitlements.plist
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>
```
To codesign with JIT support, pass the `--entitlements` flag to `codesign`.
```sh
$ codesign --deep --force -vvvv --sign "XXXXXXXXXX" --entitlements entitlements.plist ./myapp
```
After codesigning, verify the executable:
```sh
$ codesign -vvv --verify ./myapp
./myapp: valid on disk
./myapp: satisfies its Designated Requirement
```
{% callout %}
Codesign support requires Bun v1.2.4 or newer.
{% /callout %}
## Unsupported CLI arguments
Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:

View File

@@ -1,50 +1,36 @@
Using `Bun.serve()`'s `routes` option, you can run your frontend and backend in the same app with no extra steps.
Using `Bun.serve()`'s `static` option, you can run your frontend and backend in the same app with no extra steps.
To get started, import HTML files and pass them to the `routes` option in `Bun.serve()`.
To get started, import HTML files and pass them to the `static` option in `Bun.serve()`.
```ts
import { sql, serve } from "bun";
import dashboard from "./dashboard.html";
import homepage from "./index.html";
const server = serve({
routes: {
// ** HTML imports **
// Bundle & route index.html to "/". This uses HTMLRewriter to scan the HTML for `<script>` and `<link>` tags, runs Bun's JavaScript & CSS bundler on them, transpiles any TypeScript, JSX, and TSX, downlevels CSS with Bun's CSS parser and serves the result.
const server = Bun.serve({
// Add HTML imports to `static`
static: {
// Bundle & route index.html to "/"
"/": homepage,
// Bundle & route dashboard.html to "/dashboard"
"/dashboard": dashboard,
// ** API endpoints ** (Bun v1.2.3+ required)
"/api/users": {
async GET(req) {
const users = await sql`SELECT * FROM users`;
return Response.json(users);
},
async POST(req) {
const { name, email } = await req.json();
const [user] =
await sql`INSERT INTO users (name, email) VALUES (${name}, ${email})`;
return Response.json(user);
},
},
"/api/users/:id": async req => {
const { id } = req.params;
const [user] = await sql`SELECT * FROM users WHERE id = ${id}`;
return Response.json(user);
},
},
// Enable development mode for:
// - Detailed error messages
// - Hot reloading (Bun v1.2.3+ required)
// - Rebuild on request
development: true,
// Prior to v1.2.3, the `fetch` option was used to handle all API requests. It is now optional.
// async fetch(req) {
// // Return 404 for unmatched routes
// return new Response("Not Found", { status: 404 });
// },
// Handle API requests
async fetch(req) {
// ...your API code
if (req.url.endsWith("/api/users")) {
const users = await Bun.sql`SELECT * FROM users`;
return Response.json(users);
}
// Return 404 for unmatched routes
return new Response("Not Found", { status: 404 });
},
});
console.log(`Listening on ${server.url}`);
@@ -69,7 +55,7 @@ These HTML files are used as routes in Bun's dev server you can pass to `Bun.ser
```ts
Bun.serve({
routes: {
static: {
"/": homepage,
"/dashboard": dashboard,
}
@@ -127,7 +113,7 @@ import dashboard from "../public/dashboard.html";
import { serve } from "bun";
serve({
routes: {
static: {
"/": dashboard,
},
@@ -185,7 +171,7 @@ import homepage from "./index.html";
import dashboard from "./dashboard.html";
Bun.serve({
routes: {
static: {
"/": homepage,
"/dashboard": dashboard,
}
@@ -263,8 +249,6 @@ plugins = ["./my-plugin-implementation.ts"]
Bun will lazily resolve and load each plugin and use them to bundle your routes.
Note: this is currently in `bunfig.toml` to make it possible to know statically which plugins are in use when we eventually integrate this with the `bun build` CLI. These plugins work in `Bun.build()`'s JS API, but are not yet supported in the CLI.
## How this works
Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `<script>` and `<link>` tags in HTML files, uses them as entrypoints for [Bun's bundler](/docs/bundler), generates an optimized bundle for the JavaScript/TypeScript/TSX/JSX and CSS files, and serves the result.
@@ -309,5 +293,5 @@ This works similarly to how [`Bun.build` processes HTML files](/docs/bundler/htm
## This is a work in progress
- ~Client-side hot reloading isn't wired up yet. It will be in the future.~ New in Bun v1.2.3
- Client-side hot reloading isn't wired up yet. It will be in the future.
- This doesn't support `bun build` yet. It also will in the future.

View File

@@ -301,6 +301,6 @@ This is a small wrapper around Bun's support for HTML imports in JavaScript.
### Adding a backend to your frontend
To add a backend to your frontend, you can use the `"routes"` option in `Bun.serve`.
To add a backend to your frontend, you can use the `"static"` option in `Bun.serve`.
Learn more in [the full-stack docs](/docs/bundler/fullstack).

View File

@@ -12,12 +12,9 @@ Options for the `pack` command:
- `--dry-run`: Perform all tasks except writing the tarball to disk.
- `--destination`: Specify the directory where the tarball will be saved.
- `--filename`: Specify an exact file name for the tarball to be saved at.
- `--ignore-scripts`: Skip running pre/postpack and prepare scripts.
- `--gzip-level`: Set a custom compression level for gzip, ranging from 0 to 9 (default is 9).
> Note `--filename` and `--destination` cannot be used at the same time
## bin
To print the path to the `bin` directory for the local project:

View File

@@ -70,7 +70,7 @@ In the Render UI, provide the following values during web service creation:
| ----------------- | ------------- |
| **Runtime** | `Node` |
| **Build Command** | `bun install` |
| **Start Command** | `bun app.ts` |
| **Start Command** | `bun app.js` |
---

View File

@@ -1,68 +0,0 @@
---
name: Extract links from a webpage using HTMLRewriter
---
## Extract links from a webpage
Bun's [HTMLRewriter](https://bun.sh/docs/api/html-rewriter) API can be used to efficiently extract links from HTML content. It works by chaining together CSS selectors to match the elements, text, and attributes you want to process. This is a simple example of how to extract links from a webpage. You can pass `.transform` a `Response`, `Blob`, or `string`.
```ts
async function extractLinks(url: string) {
const links = new Set<string>();
const response = await fetch(url);
const rewriter = new HTMLRewriter().on("a[href]", {
element(el) {
const href = el.getAttribute("href");
if (href) {
links.add(href);
}
},
});
// Wait for the response to be processed
await rewriter.transform(response).blob();
console.log([...links]); // ["https://bun.sh", "/docs", ...]
}
// Extract all links from the Bun website
await extractLinks("https://bun.sh");
```
---
## Convert relative URLs to absolute
When scraping websites, you often want to convert relative URLs (like `/docs`) to absolute URLs. Here's how to handle URL resolution:
```ts
async function extractLinksFromURL(url: string) {
const response = await fetch(url);
const links = new Set<string>();
const rewriter = new HTMLRewriter().on("a[href]", {
element(el) {
const href = el.getAttribute("href");
if (href) {
// Convert relative URLs to absolute
try {
const absoluteURL = new URL(href, url).href;
links.add(absoluteURL);
} catch {
links.add(href);
}
}
},
});
// Wait for the response to be processed
await rewriter.transform(response).blob();
return [...links];
}
const websiteLinks = await extractLinksFromURL("https://example.com");
```
---
See [Docs > API > HTMLRewriter](https://bun.sh/docs/api/html-rewriter) for complete documentation on HTML transformation with Bun.

View File

@@ -1,93 +0,0 @@
---
name: Extract social share images and Open Graph tags
---
## Extract social share images and Open Graph tags
Bun's [HTMLRewriter](https://bun.sh/docs/api/html-rewriter) API can be used to efficiently extract social share images and Open Graph metadata from HTML content. This is particularly useful for building link preview features, social media cards, or web scrapers. We can use HTMLRewriter to match CSS selectors to HTML elements, text, and attributes we want to process.
```ts
interface SocialMetadata {
title?: string;
description?: string;
image?: string;
url?: string;
siteName?: string;
type?: string;
}
async function extractSocialMetadata(url: string): Promise<SocialMetadata> {
const metadata: SocialMetadata = {};
const response = await fetch(url);
const rewriter = new HTMLRewriter()
// Extract Open Graph meta tags
.on('meta[property^="og:"]', {
element(el) {
const property = el.getAttribute("property");
const content = el.getAttribute("content");
if (property && content) {
// Convert "og:image" to "image" etc.
const key = property.replace("og:", "") as keyof SocialMetadata;
metadata[key] = content;
}
},
})
// Extract Twitter Card meta tags as fallback
.on('meta[name^="twitter:"]', {
element(el) {
const name = el.getAttribute("name");
const content = el.getAttribute("content");
if (name && content) {
const key = name.replace("twitter:", "") as keyof SocialMetadata;
// Only use Twitter Card data if we don't have OG data
if (!metadata[key]) {
metadata[key] = content;
}
}
},
})
// Fallback to regular meta tags
.on('meta[name="description"]', {
element(el) {
const content = el.getAttribute("content");
if (content && !metadata.description) {
metadata.description = content;
}
},
})
// Fallback to title tag
.on("title", {
text(text) {
if (!metadata.title) {
metadata.title = text.text;
}
},
});
// Process the response
await rewriter.transform(response).blob();
// Convert relative image URLs to absolute
if (metadata.image && !metadata.image.startsWith("http")) {
try {
metadata.image = new URL(metadata.image, url).href;
} catch {
// Keep the original URL if parsing fails
}
}
return metadata;
}
// Example usage
const metadata = await extractSocialMetadata("https://bun.sh");
console.log(metadata);
// {
// title: "Bun — A fast all-in-one JavaScript runtime",
// description: "Bundle, transpile, install and run JavaScript & TypeScript projects — all in Bun. Bun is a fast all-in-one JavaScript runtime & toolkit designed for speed, complete with a bundler, test runner, and Node.js-compatible package manager.",
// image: "https://bun.sh/share.jpg",
// type: "website",
// ...
// }
```

View File

@@ -1,4 +0,0 @@
{
"name": "HTMLRewriter",
"description": "A collection of guides for using the HTMLRewriter streaming HTML parser with Bun"
}

View File

@@ -1,56 +0,0 @@
---
name: Codesign a single-file JavaScript executable on macOS
description: Fix the "can't be opened because it is from an unidentified developer" Gatekeeper warning when running your JavaScript executable.
---
Compile your executable using the `--compile` flag.
```sh
$ bun build --compile ./path/to/entry.ts --outfile myapp
```
---
List your available signing identities. One of these will be your signing identity that you pass to the `codesign` command. This command requires macOS.
```sh
$ security find-identity -v -p codesigning
1. XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX "Developer ID Application: Your Name (ZZZZZZZZZZ)"
1 valid identities found
```
---
Optional, but recommended: create an `entitlements.plist` file with the necessary permissions for the JavaScript engine to work correctly.
```xml#entitlements.plist
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>
```
---
Sign your executable using the `codesign` command and verify it works.
```bash
$ codesign --entitlements entitlements.plist -vvvv --deep --sign "XXXXXXXXXX" ./myapp --force
$ codesign -vvv --verify ./myapp
```
---
For more information on macOS codesigning, refer to [Apple's Code Signing documentation](https://developer.apple.com/documentation/security/code_signing_services). For details about creating single-file executables with Bun, see [Standalone Executables](/docs/bundler/executables). This guide requires Bun v1.2.4 or newer.

View File

@@ -28,21 +28,10 @@ BAR=world
Variables can also be set via the command line.
{% codetabs %}
```sh#Linux/macOS
```sh
$ FOO=helloworld bun run dev
```
```sh#Windows
# Using CMD
$ set FOO=helloworld && bun run dev
# Using PowerShell
$ $env:FOO="helloworld"; bun run dev
```
{% /codetabs %}
---
See [Docs > Runtime > Environment variables](https://bun.sh/docs/runtime/env) for more information on using environment variables with Bun.

View File

@@ -8,7 +8,7 @@ VSCode extension support is currently buggy. We recommend the [Web Debugger](htt
{% /note %}
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-inspector-protocol/src/protocol/jsc/index.d.ts) so you can debug your code with an interactive debugger.
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-vscode/types/jsc.d.ts) so you can debug your code with an interactive debugger.
---

View File

@@ -1,5 +1,7 @@
Running `bun install` will create a lockfile called `bun.lock`.
https://bun.sh/blog/bun-lock-text-lockfile
#### Should it be committed to git?
Yes

View File

@@ -63,20 +63,6 @@ $ bun install --filter "pkg-*" --filter "!pkg-c"
$ bun install --filter "./packages/pkg-*" --filter "!pkg-c" # or --filter "!./packages/pkg-c"
```
When publishing, `workspace:` versions are replaced by the package's `package.json` version,
```
"workspace:*" -> "1.0.1"
"workspace:^" -> "^1.0.1"
"workspace:~" -> "~1.0.1"
```
Setting a specific version takes precedence over the package's `package.json` version,
```
"workspace:1.0.2" -> "1.0.2" // Even if current version is 1.0.1
```
Workspaces have a couple major benefits.
- **Code can be split into logical parts.** If one package relies on another, you can simply add it as a dependency in `package.json`. If package `b` depends on `a`, `bun install` will install your local `packages/a` directory into `node_modules` instead of downloading it from the npm registry.

View File

@@ -147,7 +147,6 @@ If the command runs successfully but `bun --version` is not recognized, it means
[System.EnvironmentVariableTarget]::User
)
```
After running the command, restart your terminal and test with `bun --version`
{% /details %}
@@ -220,12 +219,11 @@ For convenience, here are download links for the latest version:
- [`bun-linux-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64.zip)
- [`bun-linux-aarch64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64-musl.zip)
- [`bun-darwin-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64.zip)
- [`bun-darwin-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64-baseline.zip)
The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.
Bun's `x64` binaries target the Haswell CPU architecture, which means they require AVX and AVX2 instructions. For Linux and Windows, the `x64-baseline` binaries are also available which target the Nehalem architecture. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically choose the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
Bun also publishes `darwin-x64-baseline` binaries, but these are just a copy of the `darwin-x64` ones so they still have the same CPU requirement. We only maintain these since some tools expect them to exist. Bun requires macOS 13.0 or later, which does not support any CPUs that don't meet our requirement.
The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
<!--
## Native

View File

@@ -2,7 +2,7 @@
name: Debugging
---
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-inspector-protocol/src/protocol/jsc/index.d.ts), so you can debug your code with an interactive debugger. For demonstration purposes, consider the following simple web server.
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-types/jsc.d.ts), so you can debug your code with an interactive debugger. For demonstration purposes, consider the following simple web server.
## Debugging JavaScript and TypeScript

View File

@@ -15,40 +15,10 @@ BAR=world
Variables can also be set via the command line.
{% codetabs %}
```sh#Linux/macOS
```sh
$ FOO=helloworld bun run dev
```
```sh#Windows
# Using CMD
$ set FOO=helloworld && bun run dev
# Using PowerShell
$ $env:FOO="helloworld"; bun run dev
```
{% /codetabs %}
{% details summary="Cross-platform solution with Windows" %}
For a cross-platform solution, you can use [bun shell](https://bun.sh/docs/runtime/shell). For example, the `bun exec` command.
```sh
$ bun exec 'FOO=helloworld bun run dev'
```
On Windows, `package.json` scripts called with `bun run` will automatically use the **bun shell**, making the following also cross-platform.
```json#package.json
"scripts": {
"dev": "NODE_ENV=development bun --watch app.ts",
},
```
{% /details %}
Or programmatically by assigning a property to `process.env`.
```ts

View File

@@ -142,7 +142,7 @@ Some methods are not optimized yet.
### [`node:util`](https://nodejs.org/api/util.html)
🟡 Missing `getCallSite` `getCallSites` `getSystemErrorMap` `getSystemErrorMessage` `transferableAbortSignal` `transferableAbortController` `MIMEType` `MIMEParams`
🟡 Missing `MIMEParams` `MIMEType` `debug` `getSystemErrorMap` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
### [`node:v8`](https://nodejs.org/api/v8.html)

View File

@@ -20,7 +20,7 @@ await $`cat < ${response} | wc -c`; // 1256
- **Safety**: Bun Shell escapes all strings by default, preventing shell injection attacks.
- **JavaScript interop**: Use `Response`, `ArrayBuffer`, `Blob`, `Bun.file(path)` and other JavaScript objects as stdin, stdout, and stderr.
- **Shell scripting**: Bun Shell can be used to run shell scripts (`.bun.sh` files).
- **Custom interpreter**: Bun Shell is written in Zig, along with its lexer, parser, and interpreter. Bun Shell is a small programming language.
- **Custom interpreter**: Bun Shell is written in Zig, along with it's lexer, parser, and interpreter. Bun Shell is a small programming language.
## Getting started

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.5",
"version": "1.2.3",
"workspaces": [
"./packages/bun-types"
],
@@ -53,7 +53,7 @@
"lint:fix": "oxlint --config oxlint.json --fix",
"test": "node scripts/runner.node.mjs --exec-path ./build/debug/bun-debug",
"test:release": "node scripts/runner.node.mjs --exec-path ./build/release/bun",
"banned": "bun test test/internal/ban-words.test.ts",
"banned": "bun packages/bun-internal-test/src/linter.ts",
"zig": "vendor/zig/zig.exe",
"zig:fmt": "bun run zig-format",
"zig:check": "bun run zig build check --summary new",

View File

@@ -0,0 +1,18 @@
{
" != undefined": "This is by definition Undefined Behavior.",
" == undefined": "This is by definition Undefined Behavior.",
"@import(\"root\").bun.": "Only import 'bun' once",
"std.debug.assert": "Use bun.assert instead",
"std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
"std.debug.print": "Don't let this be committed",
"std.mem.indexOfAny(u8": "Use bun.strings.indexOfAny",
"undefined != ": "This is by definition Undefined Behavior.",
"undefined == ": "This is by definition Undefined Behavior.",
"bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
"std.StringArrayHashMapUnmanaged(": "bun.StringArrayHashMapUnmanaged has a faster `eql`",
"std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
"std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
"std.StringHashMap(": "bun.StringHashMap has a faster `eql`",
"std.enums.tagName(": "Use bun.tagName instead",
"": ""
}

View File

@@ -0,0 +1,70 @@
import { $ } from "bun";
import BANNED from "./banned.json";
import * as action from "@actions/core";
const IGNORED_FOLDERS = [
// list of folders to ignore
"windows-shim",
];
const ci = !!process.env["GITHUB_ACTIONS"];
process.chdir(require("path").join(import.meta.dir, "../../../"));
let bad = [];
let report = "";
const write = (text: string) => {
process.stdout.write(text);
report += text;
};
for (const [banned, suggestion] of Object.entries(BANNED)) {
if (banned.length === 0) continue;
// Run git grep to find occurrences of std.debug.assert in .zig files
// .nothrow() is here since git will exit with non-zero if no matches are found.
let stdout = await $`git grep -n -F "${banned}" "src/**.zig" | grep -v -F '//' | grep -v -F bench`.nothrow().text();
stdout = stdout.trim();
if (stdout.length === 0) continue;
let lines = stdout.split("\n");
// Parse each line to extract filename and line number
const matches = lines
.filter(line => !IGNORED_FOLDERS.some(folder => line.includes(folder)))
.map(line => {
const [path, lineNumber, ...text] = line.split(":");
return { path, lineNumber, banned, suggestion, text: text.join(":") };
});
// Check if we got any output
// Split the output into lines
if (matches.length === 0) continue;
write(`Banned **'${banned}'** found in the following locations:` + "\n");
matches.forEach(match => {
write(`${match.path}:${match.lineNumber}: ${match.text.trim()}` + "\n");
});
bad = bad.concat(matches);
}
if (report.length === 0) {
process.exit(0);
}
function link({ path, lineNumber, suggestion, banned }) {
action.error(`Lint failure: ${banned} is banned, ${suggestion}`, {
file: path,
startLine: Number(lineNumber),
endLine: Number(lineNumber),
});
return `[\`${path}:${lineNumber}\`](https://github.com/oven-sh/bun/blob/${process.env.GITHUB_SHA}/${path}#L${lineNumber})`;
}
if (ci) {
if (report.length > 0) {
action.setFailed(`${bad.length} lint failures`);
}
action.setOutput("count", bad.length);
action.setOutput("text_output", bad.map(m => `- ${link(m)}: ${m.banned} is banned, ${m.suggestion}`).join("\n"));
action.setOutput("json_output", JSON.stringify(bad));
action.summary.addRaw(report);
await action.summary.write();
}
process.exit(1);

View File

@@ -455,6 +455,55 @@ declare module "bun" {
}
const TOML: TOML;
/**
 * Union of all option shapes accepted by `serve`: plain HTTP, TLS,
 * unix-socket, and WebSocket variants (each with and without TLS /
 * unix sockets).
 *
 * @template WebSocketDataType type carried on WebSocket connections for
 * the WebSocket-enabled variants
 */
type Serve<WebSocketDataType = undefined> =
| ServeOptions
| TLSServeOptions
| UnixServeOptions
| UnixTLSServeOptions
| WebSocketServeOptions<WebSocketDataType>
| TLSWebSocketServeOptions<WebSocketDataType>
| UnixWebSocketServeOptions<WebSocketDataType>
| UnixTLSWebSocketServeOptions<WebSocketDataType>;
/**
* Start a fast HTTP server.
*
* @param options Server options (port defaults to $PORT || 3000)
*
* -----
*
* @example
*
* ```ts
* Bun.serve({
* fetch(req: Request): Response | Promise<Response> {
* return new Response("Hello World!");
* },
*
* // Optional port number - the default value is 3000
* port: process.env.PORT || 3000,
* });
* ```
* -----
*
* @example
*
* Send a file
*
* ```ts
* Bun.serve({
* fetch(req: Request): Response | Promise<Response> {
* return new Response(Bun.file("./package.json"));
* },
*
* // Optional port number - the default value is 3000
* port: process.env.PORT || 3000,
* });
* ```
*/
// eslint-disable-next-line @definitelytyped/no-unnecessary-generics
function serve<T>(options: Serve<T>): Server;
/**
* Synchronously resolve a `moduleId` as though it were imported from `parent`
*
@@ -1333,18 +1382,6 @@ declare module "bun" {
*/
endpoint?: string;
/**
* Use virtual hosted style endpoint. default to false, when true if `endpoint` is informed it will ignore the `bucket`
*
* @example
* // Using virtual hosted style
* const file = s3("my-file.txt", {
* virtualHostedStyle: true,
* endpoint: "https://my-bucket.s3.us-east-1.amazonaws.com"
* });
*/
virtualHostedStyle?: boolean;
/**
* The size of each part in multipart uploads (in bytes).
* - Minimum: 5 MiB
@@ -1984,9 +2021,9 @@ declare module "bun" {
/** Database user for authentication (alias for username) */
user?: string;
/** Database password for authentication */
password?: string | (() => Promise<string>);
password?: string;
/** Database password for authentication (alias for password) */
pass?: string | (() => Promise<string>);
pass?: string;
/** Name of the database to connect to */
database?: string;
/** Name of the database to connect to (alias for database) */
@@ -2017,8 +2054,6 @@ declare module "bun" {
max?: number;
/** By default values outside i32 range are returned as strings. If this is true, values outside i32 range are returned as BigInts. */
bigint?: boolean;
/** Automatic creation of prepared statements, defaults to true */
prepare?: boolean;
};
/**
@@ -2032,8 +2067,6 @@ declare module "bun" {
cancelled: boolean;
/** Cancels the executing query */
cancel(): SQLQuery;
/** Execute as a simple query, no parameters are allowed but can execute multiple commands separated by semicolons */
simple(): SQLQuery;
/** Executes the query */
execute(): SQLQuery;
/** Returns the raw query result */
@@ -2263,13 +2296,6 @@ declare module "bun" {
* const result = await sql.unsafe(`select ${danger} from users where id = ${dragons}`)
*/
unsafe(string: string, values?: any[]): SQLQuery;
/**
* Reads a file and uses the contents as a query.
* Optional parameters can be used if the file includes $1, $2, etc
* @example
* const result = await sql.file("query.sql", [1, 2, 3]);
*/
file(filename: string, values?: any[]): SQLQuery;
/** Current client options */
options: SQLOptions;
@@ -3643,30 +3669,6 @@ declare module "bun" {
};
}
/** Helper types for Bun.serve's route table. */
namespace RouterTypes {
/**
 * Maps a route pattern string to the shape of `req.params`: each `:name`
 * segment becomes a required string property; wildcard (`*`) patterns and
 * patterns without parameters contribute no properties.
 */
type ExtractRouteParams<T> = T extends `${string}:${infer Param}/${infer Rest}`
? { [K in Param]: string } & ExtractRouteParams<Rest>
: T extends `${string}:${infer Param}`
? { [K in Param]: string }
: T extends `${string}*`
? {}
: {};
/** Function handler for a route; receives the param-typed request and the server. */
type RouteHandler<T extends string> = (req: BunRequest<T>, server: Server) => Response | Promise<Response>;
/** HTTP methods that may key a per-method route handler object. */
type HTTPMethod = "GET" | "POST" | "PUT" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS";
/** Object mapping HTTP methods to handlers for a single route. */
type RouteHandlerObject<T extends string> = {
[K in HTTPMethod]?: RouteHandler<T>;
};
/**
 * Value accepted for a route: a static Response, `false` to disable the
 * route (falling through to the fetch handler), a handler function, or a
 * per-method handler object.
 */
type RouteValue<T extends string> = Response | false | RouteHandler<T> | RouteHandlerObject<T>;
}
/**
 * A `Request` augmented with `params` extracted from the route pattern `T`
 * (e.g. `"/users/:id"` yields `params.id: string`).
 */
interface BunRequest<T extends string = string> extends Request {
params: RouterTypes.ExtractRouteParams<T>;
}
interface GenericServeOptions {
/**
* What URI should be used to make {@link Request.url} absolute?
@@ -3728,6 +3730,26 @@ declare module "bun" {
* This string will currently do nothing. But in the future it could be useful for logs or metrics.
*/
id?: string | null;
/**
* Server static Response objects by route.
*
* @example
* ```ts
* Bun.serve({
* static: {
* "/": new Response("Hello World"),
* "/about": new Response("About"),
* },
* fetch(req) {
* return new Response("Fallback response");
* },
* });
* ```
*
* @experimental
*/
static?: Record<`/${string}`, Response>;
}
interface ServeOptions extends GenericServeOptions {
@@ -4104,24 +4126,7 @@ declare module "bun" {
*
* Passing other options such as `port` or `hostname` won't do anything.
*/
reload<T, R extends { [K in keyof R]: RouterTypes.RouteValue<K & string> }>(
options: (
| (Omit<ServeOptions, "fetch"> & {
routes: R;
fetch?: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| (Omit<ServeOptions, "routes"> & {
routes?: never;
fetch: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| WebSocketServeOptions<T>
) & {
/**
* @deprecated Use `routes` instead in new code. This will continue to work for awhile though.
*/
static?: R;
},
): Server;
reload(options: Serve): void;
/**
* Mock the fetch handler for a running server.
@@ -4319,198 +4324,6 @@ declare module "bun" {
readonly id: string;
}
/**
 * Union of all option shapes accepted by `serve`: plain HTTP, TLS,
 * unix-socket, and WebSocket variants (each with and without TLS /
 * unix sockets).
 *
 * @template WebSocketDataType type carried on WebSocket connections for
 * the WebSocket-enabled variants
 */
type Serve<WebSocketDataType = undefined> =
| ServeOptions
| TLSServeOptions
| UnixServeOptions
| UnixTLSServeOptions
| WebSocketServeOptions<WebSocketDataType>
| TLSWebSocketServeOptions<WebSocketDataType>
| UnixWebSocketServeOptions<WebSocketDataType>
| UnixTLSWebSocketServeOptions<WebSocketDataType>;
/**
Bun.serve provides a high-performance HTTP server with built-in routing support.
It enables both function-based and object-based route handlers with type-safe
parameters and method-specific handling.
@example Basic Usage
```ts
Bun.serve({
port: 3000,
fetch(req) {
return new Response("Hello World");
}
});
```
@example Route-based Handlers
```ts
Bun.serve({
routes: {
// Static responses
"/": new Response("Home page"),
// Function handlers with type-safe parameters
"/users/:id": (req) => {
// req.params.id is typed as string
return new Response(`User ${req.params.id}`);
},
// Method-specific handlers
"/api/posts": {
GET: () => new Response("Get posts"),
POST: async (req) => {
const body = await req.json();
return new Response("Created post");
},
DELETE: (req) => new Response("Deleted post")
},
// Wildcard routes
"/static/*": (req) => {
// Handle any path under /static/
return new Response("Static file");
},
// Disable route (fall through to fetch handler)
"/api/legacy": false
},
// Fallback handler for unmatched routes
fetch(req) {
return new Response("Not Found", { status: 404 });
}
});
```
@example Path Parameters
```ts
Bun.serve({
routes: {
// Single parameter
"/users/:id": (req: BunRequest<"/users/:id">) => {
return new Response(`User ID: ${req.params.id}`);
},
// Multiple parameters
"/posts/:postId/comments/:commentId": (
req: BunRequest<"/posts/:postId/comments/:commentId">
) => {
return new Response(JSON.stringify(req.params));
// Output: {"postId": "123", "commentId": "456"}
}
}
});
```
@example Route Precedence
```ts
// Routes are matched in the following order:
// 1. Exact static routes ("/about")
// 2. Parameter routes ("/users/:id")
// 3. Wildcard routes ("/api/*")
Bun.serve({
routes: {
"/api/users": () => new Response("Users list"),
"/api/users/:id": (req) => new Response(`User ${req.params.id}`),
"/api/*": () => new Response("API catchall"),
"/*": () => new Response("Root catchall")
}
});
```
@example Error Handling
```ts
Bun.serve({
routes: {
"/error": () => {
throw new Error("Something went wrong");
}
},
error(error) {
// Custom error handler
console.error(error);
return new Response(`Error: ${error.message}`, {
status: 500
});
}
});
```
@example Server Lifecycle
```ts
const server = Bun.serve({
// Server config...
});
// Update routes at runtime
server.reload({
routes: {
"/": () => new Response("Updated route")
}
});
// Stop the server
server.stop();
```
@example Development Mode
```ts
Bun.serve({
development: true, // Enable hot reloading
routes: {
// Routes will auto-reload on changes
}
});
```
@example Type-Safe Request Handling
```ts
type Post = {
id: string;
title: string;
};
Bun.serve({
routes: {
"/api/posts/:id": async (
req: BunRequest<"/api/posts/:id">
) => {
if (req.method === "POST") {
const body: Post = await req.json();
return Response.json(body);
}
return new Response("Method not allowed", {
status: 405
});
}
}
});
```
@param options - Server configuration options
@param options.routes - Route definitions mapping paths to handlers
*/
function serve<T, R extends { [K in keyof R]: RouterTypes.RouteValue<K & string> }>(
options: (
| (Omit<ServeOptions, "fetch"> & {
routes: R;
fetch?: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| (Omit<ServeOptions, "routes"> & {
routes?: never;
fetch: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| WebSocketServeOptions<T>
) & {
/**
* @deprecated Use `routes` instead in new code. This will continue to work for awhile though.
*/
static?: R;
},
): Server;
/**
* [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) powered by the fastest system calls available for operating on files.
*
@@ -5536,14 +5349,12 @@ declare module "bun" {
* },
* });
* ```
*
* @returns `this` for method chaining
*/
onStart(callback: OnStartCallback): this;
onStart(callback: OnStartCallback): void;
onBeforeParse(
constraints: PluginConstraints,
callback: { napiModule: unknown; symbol: string; external?: unknown | undefined },
): this;
): void;
/**
* Register a callback to load imports with a specific import specifier
* @param constraints The constraints to apply the plugin to
@@ -5558,10 +5369,8 @@ declare module "bun" {
* },
* });
* ```
*
* @returns `this` for method chaining
*/
onLoad(constraints: PluginConstraints, callback: OnLoadCallback): this;
onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void;
/**
* Register a callback to resolve imports matching a filter and/or namespace
* @param constraints The constraints to apply the plugin to
@@ -5576,10 +5385,8 @@ declare module "bun" {
* },
* });
* ```
*
* @returns `this` for method chaining
*/
onResolve(constraints: PluginConstraints, callback: OnResolveCallback): this;
onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void;
/**
* The config object passed to `Bun.build` as is. Can be mutated.
*/
@@ -5610,10 +5417,8 @@ declare module "bun" {
* const { foo } = require("hello:world");
* console.log(foo); // "bar"
* ```
*
* @returns `this` for method chaining
*/
module(specifier: string, callback: () => OnLoadResult | Promise<OnLoadResult>): this;
module(specifier: string, callback: () => OnLoadResult | Promise<OnLoadResult>): void;
}
interface BunPlugin {

View File

@@ -408,7 +408,7 @@ static int bsd_socket_set_membership6(LIBUS_SOCKET_DESCRIPTOR fd, const struct s
mreq.ipv6mr_interface = iface->sin6_scope_id;
}
int option = drop ? IPV6_LEAVE_GROUP : IPV6_JOIN_GROUP;
return setsockopt(fd, IPPROTO_IPV6, option, &mreq, sizeof(mreq));
return setsockopt(fd, IPPROTO_IP, option, &mreq, sizeof(mreq));
}
int bsd_socket_set_membership(LIBUS_SOCKET_DESCRIPTOR fd, const struct sockaddr_storage *addr, const struct sockaddr_storage *iface, int drop) {

View File

@@ -326,18 +326,33 @@ int us_internal_ssl_socket_is_closed(struct us_internal_ssl_socket_t *s) {
return us_socket_is_closed(0, &s->s);
}
struct us_internal_ssl_socket_t *
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
void *reason) {
void us_internal_trigger_handshake_callback_econnreset(struct us_internal_ssl_socket_t *s) {
struct us_internal_ssl_socket_context_t *context =
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
// always set the handshake state to completed
s->handshake_state = HANDSHAKE_COMPLETED;
if (context->on_handshake != NULL) {
struct us_bun_verify_error_t verify_error = (struct us_bun_verify_error_t){ .error = -46, .code = "ECONNRESET", .reason = "Client network socket disconnected before secure TLS connection was established"};
context->on_handshake(s, 0, verify_error, context->handshake_data);
// check if we are already closed
if (us_internal_ssl_socket_is_closed(s)) return s;
if (s->handshake_state != HANDSHAKE_COMPLETED) {
// if we have some pending handshake we cancel it and try to check the
// latest handshake error this way we will always call on_handshake with the
// latest error before closing this should always call
// secureConnection/secure before close if we remove this here, we will need
// to do this check on every on_close event on sockets, fetch etc and will
// increase complexity on a lot of places
us_internal_trigger_handshake_callback(s, 0);
}
// if we are in the middle of a close_notify we need to finish it (code != 0 forces a fast shutdown)
int can_close = us_internal_handle_shutdown(s, code != 0);
// only close the socket if we are not in the middle of a handshake
if(can_close) {
return (struct us_internal_ssl_socket_t *)us_socket_close(0, (struct us_socket_t *)s, code, reason);
}
return s;
}
void us_internal_trigger_handshake_callback(struct us_internal_ssl_socket_t *s,
int success) {
struct us_internal_ssl_socket_context_t *context =
@@ -351,32 +366,6 @@ void us_internal_trigger_handshake_callback(struct us_internal_ssl_socket_t *s,
context->on_handshake(s, success, verify_error, context->handshake_data);
}
}
struct us_internal_ssl_socket_t *
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
void *reason) {
// check if we are already closed
if (us_internal_ssl_socket_is_closed(s)) return s;
us_internal_update_handshake(s);
if (s->handshake_state != HANDSHAKE_COMPLETED) {
// if we have some pending handshake we cancel it and try to check the
// latest handshake error this way we will always call on_handshake with the
// ECONNRESET error if we remove this here, we will need
// to do this check on every on_close event on sockets, fetch etc and will
// increase complexity on a lot of places
us_internal_trigger_handshake_callback_econnreset(s);
}
// if we are in the middle of a close_notify we need to finish it (code != 0 forces a fast shutdown)
int can_close = us_internal_handle_shutdown(s, code != 0);
// only close the socket if we are not in the middle of a handshake
if(can_close) {
return (struct us_internal_ssl_socket_t *)us_socket_close(0, (struct us_socket_t *)s, code, reason);
}
return s;
}
int us_internal_ssl_renegotiate(struct us_internal_ssl_socket_t *s) {
// handle renegotation here since we are using ssl_renegotiate_explicit
@@ -1778,9 +1767,9 @@ int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
loop_ssl_data->ssl_socket = &s->s;
loop_ssl_data->msg_more = msg_more;
loop_ssl_data->last_write_was_msg_more = 0;
int written = SSL_write(s->ssl, data, length);
loop_ssl_data->msg_more = 0;
if (loop_ssl_data->last_write_was_msg_more && !msg_more) {
us_socket_flush(0, &s->s);
}

View File

@@ -457,7 +457,6 @@ struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_clien
return s;
}
int us_socket_raw_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more) {
#ifndef LIBUS_NO_SSL
if (ssl) {

View File

@@ -167,7 +167,7 @@ export function registerTestRunner(context: vscode.ExtensionContext) {
let command = customScript;
if (filePath.length !== 0) {
command += ` "${filePath}"`;
command += ` ${filePath}`;
}
if (testName && testName.length) {

View File

@@ -1,919 +0,0 @@
diff --git a/include/ncrypto.h b/include/ncrypto.h
index be9e0ca..f8000de 100644
--- a/include/ncrypto.h
+++ b/include/ncrypto.h
@@ -1,5 +1,15 @@
#pragma once
+#include "root.h"
+
+#ifdef ASSERT_ENABLED
+#define NCRYPTO_DEVELOPMENT_CHECKS 1
+#endif
+
+#include <wtf/text/WTFString.h>
+#include <wtf/text/StringView.h>
+#include <wtf/Function.h>
+
#include <openssl/bio.h>
#include <openssl/bn.h>
#include <openssl/dh.h>
@@ -61,30 +71,11 @@ namespace ncrypto {
#if NCRYPTO_DEVELOPMENT_CHECKS
#define NCRYPTO_STR(x) #x
-#define NCRYPTO_REQUIRE(EXPR) \
- { \
- if (!(EXPR) { abort(); }) }
-
-#define NCRYPTO_FAIL(MESSAGE) \
- do { \
- std::cerr << "FAIL: " << (MESSAGE) << std::endl; \
- abort(); \
- } while (0);
-#define NCRYPTO_ASSERT_EQUAL(LHS, RHS, MESSAGE) \
- do { \
- if (LHS != RHS) { \
- std::cerr << "Mismatch: '" << LHS << "' - '" << RHS << "'" << std::endl; \
- NCRYPTO_FAIL(MESSAGE); \
- } \
- } while (0);
-#define NCRYPTO_ASSERT_TRUE(COND) \
- do { \
- if (!(COND)) { \
- std::cerr << "Assert at line " << __LINE__ << " of file " << __FILE__ \
- << std::endl; \
- NCRYPTO_FAIL(NCRYPTO_STR(COND)); \
- } \
- } while (0);
+#define NCRYPTO_REQUIRE(EXPR) ASSERT_WITH_MESSAGE(EXPR, "Assertion failed")
+#define NCRYPTO_FAIL(MESSAGE) ASSERT_WITH_MESSAGE(false, MESSAGE)
+#define NCRYPTO_ASSERT_EQUAL(LHS, RHS, MESSAGE) \
+ ASSERT_WITH_MESSAGE(LHS == RHS, MESSAGE)
+#define NCRYPTO_ASSERT_TRUE(COND) ASSERT_WITH_MESSAGE(COND, NCRYPTO_STR(COND))
#else
#define NCRYPTO_FAIL(MESSAGE)
#define NCRYPTO_ASSERT_EQUAL(LHS, RHS, MESSAGE)
@@ -131,9 +122,9 @@ class CryptoErrorList final {
void capture();
// Add an error message to the end of the stack.
- void add(std::string message);
+ void add(WTF::String message);
- inline const std::string& peek_back() const { return errors_.back(); }
+ inline const WTF::String& peek_back() const { return errors_.back(); }
inline size_t size() const { return errors_.size(); }
inline bool empty() const { return errors_.empty(); }
@@ -142,11 +133,11 @@ class CryptoErrorList final {
inline auto rbegin() const noexcept { return errors_.rbegin(); }
inline auto rend() const noexcept { return errors_.rend(); }
- std::optional<std::string> pop_back();
- std::optional<std::string> pop_front();
+ std::optional<WTF::String> pop_back();
+ std::optional<WTF::String> pop_front();
private:
- std::list<std::string> errors_;
+ std::list<WTF::String> errors_;
};
// Forcibly clears the error stack on destruction. This stops stale errors
@@ -277,12 +268,12 @@ class Cipher final {
int getIvLength() const;
int getKeyLength() const;
int getBlockSize() const;
- std::string_view getModeLabel() const;
- std::string_view getName() const;
+ WTF::ASCIILiteral getModeLabel() const;
+ WTF::String getName() const;
bool isSupportedAuthenticatedMode() const;
- static const Cipher FromName(std::string_view name);
+ static const Cipher FromName(WTF::StringView name);
static const Cipher FromNid(int nid);
static const Cipher FromCtx(const CipherCtxPointer& ctx);
@@ -336,6 +327,8 @@ class Dsa final {
};
class BignumPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(BignumPointer);
+
public:
BignumPointer() = default;
explicit BignumPointer(BIGNUM* bignum);
@@ -429,8 +422,8 @@ class Rsa final {
const BIGNUM* qi;
};
struct PssParams {
- std::string_view digest = "sha1";
- std::optional<std::string_view> mgf1_digest = "sha1";
+ WTF::StringView digest = "sha1"_s;
+ std::optional<WTF::StringView> mgf1_digest = "sha1"_s;
int64_t salt_length = 20;
};
@@ -465,7 +458,7 @@ class Ec final {
const EC_GROUP* getGroup() const;
int getCurve() const;
uint32_t getDegree() const;
- std::string getCurveName() const;
+ WTF::String getCurveName() const;
const EC_POINT* getPublicKey() const;
const BIGNUM* getPrivateKey() const;
@@ -535,13 +528,15 @@ class DataPointer final {
};
class BIOPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(BIOPointer);
+
public:
static BIOPointer NewMem();
static BIOPointer NewSecMem();
static BIOPointer New(const BIO_METHOD* method);
static BIOPointer New(const void* data, size_t len);
static BIOPointer New(const BIGNUM* bn);
- static BIOPointer NewFile(std::string_view filename, std::string_view mode);
+ static BIOPointer NewFile(WTF::StringView filename, WTF::StringView mode);
static BIOPointer NewFp(FILE* fd, int flags);
template <typename T>
@@ -575,7 +570,7 @@ class BIOPointer final {
bool resetBio() const;
- static int Write(BIOPointer* bio, std::string_view message);
+ static int Write(BIOPointer* bio, WTF::StringView message);
template <typename... Args>
static void Printf(BIOPointer* bio, const char* format, Args... args) {
@@ -588,6 +583,8 @@ class BIOPointer final {
};
class CipherCtxPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(CipherCtxPointer);
+
public:
static CipherCtxPointer New();
@@ -630,6 +627,8 @@ class CipherCtxPointer final {
};
class EVPKeyCtxPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(EVPKeyCtxPointer);
+
public:
EVPKeyCtxPointer();
explicit EVPKeyCtxPointer(EVP_PKEY_CTX* ctx);
@@ -697,6 +696,8 @@ class EVPKeyCtxPointer final {
};
class EVPKeyPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(EVPKeyPointer);
+
public:
static EVPKeyPointer New();
static EVPKeyPointer NewRawPublic(int id,
@@ -821,6 +822,8 @@ class EVPKeyPointer final {
};
class DHPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(DHPointer);
+
public:
enum class FindGroupOption {
NONE,
@@ -833,9 +836,9 @@ class DHPointer final {
static BignumPointer GetStandardGenerator();
static BignumPointer FindGroup(
- const std::string_view name,
+ const WTF::StringView name,
FindGroupOption option = FindGroupOption::NONE);
- static DHPointer FromGroup(const std::string_view name,
+ static DHPointer FromGroup(const WTF::StringView name,
FindGroupOption option = FindGroupOption::NONE);
static DHPointer New(BignumPointer&& p, BignumPointer&& g);
@@ -910,6 +913,8 @@ struct StackOfX509Deleter {
using StackOfX509 = std::unique_ptr<STACK_OF(X509), StackOfX509Deleter>;
class SSLCtxPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(SSLCtxPointer);
+
public:
SSLCtxPointer() = default;
explicit SSLCtxPointer(SSL_CTX* ctx);
@@ -943,6 +948,8 @@ class SSLCtxPointer final {
};
class SSLPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(SSLPointer);
+
public:
SSLPointer() = default;
explicit SSLPointer(SSL* ssl);
@@ -961,31 +968,33 @@ class SSLPointer final {
bool setSession(const SSLSessionPointer& session);
bool setSniContext(const SSLCtxPointer& ctx) const;
- const std::string_view getClientHelloAlpn() const;
- const std::string_view getClientHelloServerName() const;
+ const WTF::StringView getClientHelloAlpn() const;
+ const WTF::StringView getClientHelloServerName() const;
- std::optional<const std::string_view> getServerName() const;
+ std::optional<const WTF::String> getServerName() const;
X509View getCertificate() const;
EVPKeyPointer getPeerTempKey() const;
const SSL_CIPHER* getCipher() const;
bool isServer() const;
- std::optional<std::string_view> getCipherName() const;
- std::optional<std::string_view> getCipherStandardName() const;
- std::optional<std::string_view> getCipherVersion() const;
+ std::optional<WTF::StringView> getCipherName() const;
+ std::optional<WTF::StringView> getCipherStandardName() const;
+ std::optional<WTF::StringView> getCipherVersion() const;
std::optional<uint32_t> verifyPeerCertificate() const;
- void getCiphers(std::function<void(const std::string_view)> cb) const;
+ void getCiphers(WTF::Function<void(const WTF::StringView)>&& cb) const;
static SSLPointer New(const SSLCtxPointer& ctx);
- static std::optional<const std::string_view> GetServerName(const SSL* ssl);
+ static std::optional<const WTF::String> GetServerName(const SSL* ssl);
private:
DeleteFnPtr<SSL, SSL_free> ssl_;
};
class X509Name final {
+ WTF_MAKE_TZONE_ALLOCATED(X509Name);
+
public:
X509Name();
explicit X509Name(const X509_NAME* name);
@@ -1007,7 +1016,7 @@ class X509Name final {
operator bool() const;
bool operator==(const Iterator& other) const;
bool operator!=(const Iterator& other) const;
- std::pair<std::string, std::string> operator*() const;
+ std::pair<WTF::String, WTF::String> operator*() const;
private:
const X509Name& name_;
@@ -1062,7 +1071,7 @@ class X509View final {
bool checkPrivateKey(const EVPKeyPointer& pkey) const;
bool checkPublicKey(const EVPKeyPointer& pkey) const;
- std::optional<std::string> getFingerprint(const EVP_MD* method) const;
+ std::optional<WTF::String> getFingerprint(const EVP_MD* method) const;
X509Pointer clone() const;
@@ -1072,16 +1081,16 @@ class X509View final {
INVALID_NAME,
OPERATION_FAILED,
};
- CheckMatch checkHost(const std::string_view host, int flags,
+ CheckMatch checkHost(const std::span<const char> host, int flags,
DataPointer* peerName = nullptr) const;
- CheckMatch checkEmail(const std::string_view email, int flags) const;
- CheckMatch checkIp(const std::string_view ip, int flags) const;
+ CheckMatch checkEmail(const std::span<const char> email, int flags) const;
+ CheckMatch checkIp(const char* ip, int flags) const;
- using UsageCallback = std::function<void(std::string_view)>;
+ using UsageCallback = WTF::Function<void(std::span<const char>)>;
bool enumUsages(UsageCallback callback) const;
template <typename T>
- using KeyCallback = std::function<bool(const T& t)>;
+ using KeyCallback = WTF::Function<bool(const T& t)>;
bool ifRsa(KeyCallback<Rsa> callback) const;
bool ifEc(KeyCallback<Ec> callback) const;
@@ -1090,6 +1099,8 @@ class X509View final {
};
class X509Pointer final {
+ WTF_MAKE_TZONE_ALLOCATED(X509Pointer);
+
public:
static Result<X509Pointer, int> Parse(Buffer<const unsigned char> buffer);
static X509Pointer IssuerFrom(const SSLPointer& ssl, const X509View& view);
@@ -1114,14 +1125,16 @@ class X509Pointer final {
X509View view() const;
operator X509View() const { return view(); }
- static std::string_view ErrorCode(int32_t err);
- static std::optional<std::string_view> ErrorReason(int32_t err);
+ static WTF::ASCIILiteral ErrorCode(int32_t err);
+ static std::optional<WTF::ASCIILiteral> ErrorReason(int32_t err);
private:
DeleteFnPtr<X509, X509_free> cert_;
};
class ECDSASigPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(ECDSASigPointer);
+
public:
explicit ECDSASigPointer();
explicit ECDSASigPointer(ECDSA_SIG* sig);
@@ -1154,6 +1167,8 @@ class ECDSASigPointer final {
};
class ECGroupPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(ECGroupPointer);
+
public:
explicit ECGroupPointer();
explicit ECGroupPointer(EC_GROUP* group);
@@ -1176,6 +1191,8 @@ class ECGroupPointer final {
};
class ECPointPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(ECPointPointer);
+
public:
ECPointPointer();
explicit ECPointPointer(EC_POINT* point);
@@ -1202,6 +1219,8 @@ class ECPointPointer final {
};
class ECKeyPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(ECKeyPointer);
+
public:
ECKeyPointer();
explicit ECKeyPointer(EC_KEY* key);
@@ -1242,6 +1261,8 @@ class ECKeyPointer final {
};
class EVPMDCtxPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(EVPMDCtxPointer);
+
public:
EVPMDCtxPointer();
explicit EVPMDCtxPointer(EVP_MD_CTX* ctx);
@@ -1286,6 +1307,8 @@ class EVPMDCtxPointer final {
};
class HMACCtxPointer final {
+ WTF_MAKE_TZONE_ALLOCATED(HMACCtxPointer);
+
public:
HMACCtxPointer();
explicit HMACCtxPointer(HMAC_CTX* ctx);
@@ -1331,7 +1354,7 @@ class EnginePointer final {
bool setAsDefault(uint32_t flags, CryptoErrorList* errors = nullptr);
bool init(bool finish_on_exit = false);
- EVPKeyPointer loadPrivateKey(const std::string_view key_name);
+ EVPKeyPointer loadPrivateKey(const WTF::StringView key_name);
// Release ownership of the ENGINE* pointer.
ENGINE* release();
@@ -1339,7 +1362,7 @@ class EnginePointer final {
// Retrieve an OpenSSL Engine instance by name. If the name does not
// identify a valid named engine, the returned EnginePointer will be
// empty.
- static EnginePointer getEngineByName(const std::string_view name,
+ static EnginePointer getEngineByName(const WTF::StringView name,
CryptoErrorList* errors = nullptr);
// Call once when initializing OpenSSL at startup for the process.
@@ -1396,8 +1419,8 @@ DataPointer ExportChallenge(const Buffer<const char>& buf);
// ============================================================================
// KDF
-const EVP_MD* getDigestByName(const std::string_view name);
-const EVP_CIPHER* getCipherByName(const std::string_view name);
+const EVP_MD* getDigestByName(const WTF::StringView name);
+const EVP_CIPHER* getCipherByName(const WTF::StringView name);
// Verify that the specified HKDF output length is valid for the given digest.
// The maximum length for HKDF output for a given digest is 255 times the
diff --git a/src/ncrypto.cpp b/src/ncrypto.cpp
index 2e411ce..2315eb5 100644
--- a/src/ncrypto.cpp
+++ b/src/ncrypto.cpp
@@ -1,3 +1,8 @@
+#include "root.h"
+#include "wtf/text/ASCIILiteral.h"
+#include "wtf/text/StringImpl.h"
+#include "wtf/text/WTFString.h"
+
#include "ncrypto.h"
#include <openssl/asn1.h>
@@ -75,22 +80,22 @@ void CryptoErrorList::capture() {
while (const auto err = ERR_get_error()) {
char buf[256];
ERR_error_string_n(err, buf, sizeof(buf));
- errors_.emplace_front(buf);
+ errors_.emplace_front(WTF::String::fromUTF8(buf));
}
}
-void CryptoErrorList::add(std::string error) { errors_.push_back(error); }
+void CryptoErrorList::add(WTF::String error) { errors_.push_back(error); }
-std::optional<std::string> CryptoErrorList::pop_back() {
+std::optional<WTF::String> CryptoErrorList::pop_back() {
if (errors_.empty()) return std::nullopt;
- std::string error = errors_.back();
+ WTF::String error = errors_.back();
errors_.pop_back();
return error;
}
-std::optional<std::string> CryptoErrorList::pop_front() {
+std::optional<WTF::String> CryptoErrorList::pop_front() {
if (errors_.empty()) return std::nullopt;
- std::string error = errors_.front();
+ WTF::String error = errors_.front();
errors_.pop_front();
return error;
}
@@ -1104,7 +1109,8 @@ bool X509View::checkPublicKey(const EVPKeyPointer& pkey) const {
return X509_verify(const_cast<X509*>(cert_), pkey.get()) == 1;
}
-X509View::CheckMatch X509View::checkHost(const std::string_view host, int flags,
+X509View::CheckMatch X509View::checkHost(const std::span<const char> host,
+ int flags,
DataPointer* peerName) const {
ClearErrorOnReturn clearErrorOnReturn;
if (cert_ == nullptr) return CheckMatch::NO_MATCH;
@@ -1127,7 +1133,7 @@ X509View::CheckMatch X509View::checkHost(const std::string_view host, int flags,
}
}
-X509View::CheckMatch X509View::checkEmail(const std::string_view email,
+X509View::CheckMatch X509View::checkEmail(const std::span<const char> email,
int flags) const {
ClearErrorOnReturn clearErrorOnReturn;
if (cert_ == nullptr) return CheckMatch::NO_MATCH;
@@ -1144,11 +1150,10 @@ X509View::CheckMatch X509View::checkEmail(const std::string_view email,
}
}
-X509View::CheckMatch X509View::checkIp(const std::string_view ip,
- int flags) const {
+X509View::CheckMatch X509View::checkIp(const char* ip, int flags) const {
ClearErrorOnReturn clearErrorOnReturn;
if (cert_ == nullptr) return CheckMatch::NO_MATCH;
- switch (X509_check_ip_asc(const_cast<X509*>(cert_), ip.data(), flags)) {
+ switch (X509_check_ip_asc(const_cast<X509*>(cert_), ip, flags)) {
case 0:
return CheckMatch::NO_MATCH;
case 1:
@@ -1172,7 +1177,7 @@ X509View X509View::From(const SSLCtxPointer& ctx) {
return X509View(SSL_CTX_get0_certificate(ctx.get()));
}
-std::optional<std::string> X509View::getFingerprint(
+std::optional<WTF::String> X509View::getFingerprint(
const EVP_MD* method) const {
unsigned int md_size;
unsigned char md[EVP_MAX_MD_SIZE];
@@ -1180,7 +1185,9 @@ std::optional<std::string> X509View::getFingerprint(
if (X509_digest(get(), method, md, &md_size)) {
if (md_size == 0) return std::nullopt;
- std::string fingerprint((md_size * 3) - 1, 0);
+ std::span<LChar> fingerprint;
+ WTF::String fingerprintStr =
+ WTF::String::createUninitialized((md_size * 3) - 1, fingerprint);
for (unsigned int i = 0; i < md_size; i++) {
auto idx = 3 * i;
fingerprint[idx] = hex[(md[i] & 0xf0) >> 4];
@@ -1189,7 +1196,7 @@ std::optional<std::string> X509View::getFingerprint(
fingerprint[idx + 2] = ':';
}
- return fingerprint;
+ return fingerprintStr;
}
return std::nullopt;
@@ -1299,10 +1306,10 @@ X509Pointer X509Pointer::PeerFrom(const SSLPointer& ssl) {
// When adding or removing errors below, please also update the list in the API
// documentation. See the "OpenSSL Error Codes" section of doc/api/errors.md
// Also *please* update the respective section in doc/api/tls.md as well
-std::string_view X509Pointer::ErrorCode(int32_t err) { // NOLINT(runtime/int)
+WTF::ASCIILiteral X509Pointer::ErrorCode(int32_t err) { // NOLINT(runtime/int)
#define CASE(CODE) \
case X509_V_ERR_##CODE: \
- return #CODE;
+ return #CODE##_s;
switch (err) {
CASE(UNABLE_TO_GET_ISSUER_CERT)
CASE(UNABLE_TO_GET_CRL)
@@ -1334,12 +1341,24 @@ std::string_view X509Pointer::ErrorCode(int32_t err) { // NOLINT(runtime/int)
CASE(HOSTNAME_MISMATCH)
}
#undef CASE
- return "UNSPECIFIED";
+ return "UNSPECIFIED"_s;
}
-std::optional<std::string_view> X509Pointer::ErrorReason(int32_t err) {
+std::optional<WTF::ASCIILiteral> X509Pointer::ErrorReason(int32_t err) {
if (err == X509_V_OK) return std::nullopt;
- return X509_verify_cert_error_string(err);
+
+ // TODO(dylan-conway): delete this switch?
+ switch (err) {
+#define V(name, msg) \
+ case X509_V_ERR_##name: \
+ return msg##_s;
+ V(HOSTNAME_MISMATCH, "Hostname does not match certificate")
+ V(EMAIL_MISMATCH, "Email address does not match certificate")
+ V(IP_ADDRESS_MISMATCH, "IP address does not match certificate")
+#undef V
+ }
+ return WTF::ASCIILiteral::fromLiteralUnsafe(
+ X509_verify_cert_error_string(err));
}
// ============================================================================
@@ -1385,9 +1404,10 @@ BIOPointer BIOPointer::New(const void* data, size_t len) {
return BIOPointer(BIO_new_mem_buf(data, len));
}
-BIOPointer BIOPointer::NewFile(std::string_view filename,
- std::string_view mode) {
- return BIOPointer(BIO_new_file(filename.data(), mode.data()));
+BIOPointer BIOPointer::NewFile(WTF::StringView filename, WTF::StringView mode) {
+ auto filenameUtf8 = filename.utf8();
+ auto modeUtf8 = mode.utf8();
+ return BIOPointer(BIO_new_file(filenameUtf8.data(), modeUtf8.data()));
}
BIOPointer BIOPointer::NewFp(FILE* fd, int close_flag) {
@@ -1400,20 +1420,18 @@ BIOPointer BIOPointer::New(const BIGNUM* bn) {
return res;
}
-int BIOPointer::Write(BIOPointer* bio, std::string_view message) {
- if (bio == nullptr || !*bio) return 0;
- return BIO_write(bio->get(), message.data(), message.size());
+int BIOPointer::Write(BIOPointer* bio, WTF::StringView message) {
+ auto messageUtf8 = message.utf8();
+ return Write(bio, messageUtf8.span());
}
// ============================================================================
// DHPointer
namespace {
-bool EqualNoCase(const std::string_view a, const std::string_view b) {
- if (a.size() != b.size()) return false;
- return std::equal(a.begin(), a.end(), b.begin(), b.end(), [](char a, char b) {
- return std::tolower(a) == std::tolower(b);
- });
+bool EqualNoCase(const WTF::StringView a, const WTF::StringView b) {
+ if (a.length() != b.length()) return false;
+ return a.startsWithIgnoringASCIICase(b);
}
} // namespace
@@ -1433,23 +1451,23 @@ void DHPointer::reset(DH* dh) { dh_.reset(dh); }
DH* DHPointer::release() { return dh_.release(); }
-BignumPointer DHPointer::FindGroup(const std::string_view name,
+BignumPointer DHPointer::FindGroup(const WTF::StringView name,
FindGroupOption option) {
#define V(n, p) \
if (EqualNoCase(name, n)) return BignumPointer(p(nullptr));
if (option != FindGroupOption::NO_SMALL_PRIMES) {
#ifndef OPENSSL_IS_BORINGSSL
// Boringssl does not support the 768 and 1024 small primes
- V("modp1", BN_get_rfc2409_prime_768);
- V("modp2", BN_get_rfc2409_prime_1024);
+ V("modp1"_s, BN_get_rfc2409_prime_768);
+ V("modp2"_s, BN_get_rfc2409_prime_1024);
#endif
- V("modp5", BN_get_rfc3526_prime_1536);
+ V("modp5"_s, BN_get_rfc3526_prime_1536);
}
- V("modp14", BN_get_rfc3526_prime_2048);
- V("modp15", BN_get_rfc3526_prime_3072);
- V("modp16", BN_get_rfc3526_prime_4096);
- V("modp17", BN_get_rfc3526_prime_6144);
- V("modp18", BN_get_rfc3526_prime_8192);
+ V("modp14"_s, BN_get_rfc3526_prime_2048);
+ V("modp15"_s, BN_get_rfc3526_prime_3072);
+ V("modp16"_s, BN_get_rfc3526_prime_4096);
+ V("modp17"_s, BN_get_rfc3526_prime_6144);
+ V("modp18"_s, BN_get_rfc3526_prime_8192);
#undef V
return {};
}
@@ -1461,7 +1479,7 @@ BignumPointer DHPointer::GetStandardGenerator() {
return bn;
}
-DHPointer DHPointer::FromGroup(const std::string_view name,
+DHPointer DHPointer::FromGroup(const WTF::StringView name,
FindGroupOption option) {
auto group = FindGroup(name, option);
if (!group) return {}; // Unable to find the named group.
@@ -1469,7 +1487,7 @@ DHPointer DHPointer::FromGroup(const std::string_view name,
auto generator = GetStandardGenerator();
if (!generator) return {}; // Unable to create the generator.
- return New(std::move(group), std::move(generator));
+ return New(WTFMove(group), WTFMove(generator));
}
DHPointer DHPointer::New(BignumPointer&& p, BignumPointer&& g) {
@@ -1663,17 +1681,24 @@ DataPointer DHPointer::stateless(const EVPKeyPointer& ourKey,
// ============================================================================
// KDF
-const EVP_MD* getDigestByName(const std::string_view name) {
+const EVP_MD* getDigestByName(const WTF::StringView name) {
// Historically, "dss1" and "DSS1" were DSA aliases for SHA-1
// exposed through the public API.
- if (name == "dss1" || name == "DSS1") [[unlikely]] {
+ if (name == "dss1"_s || name == "DSS1"_s) [[unlikely]] {
return EVP_sha1();
}
- return EVP_get_digestbyname(name.data());
+
+ // if (name == "ripemd160WithRSA"_s || name == "RSA-RIPEMD160"_s) {
+ // return EVP_ripemd160();
+ // }
+
+ auto nameUtf8 = name.utf8();
+ return EVP_get_digestbyname(nameUtf8.data());
}
-const EVP_CIPHER* getCipherByName(const std::string_view name) {
- return EVP_get_cipherbyname(name.data());
+const EVP_CIPHER* getCipherByName(const WTF::StringView name) {
+ auto nameUtf8 = name.utf8();
+ return EVP_get_cipherbyname(nameUtf8.data());
}
bool checkHkdfLength(const EVP_MD* md, size_t length) {
@@ -2499,7 +2524,7 @@ SSLPointer SSLPointer::New(const SSLCtxPointer& ctx) {
}
void SSLPointer::getCiphers(
- std::function<void(const std::string_view)> cb) const {
+ WTF::Function<void(const WTF::StringView)>&& cb) const {
if (!ssl_) return;
STACK_OF(SSL_CIPHER)* ciphers = SSL_get_ciphers(get());
@@ -2507,16 +2532,16 @@ void SSLPointer::getCiphers(
// document them, but since there are only 5, easier to just add them manually
// and not have to explain their absence in the API docs. They are lower-cased
// because the docs say they will be.
- static constexpr const char* TLS13_CIPHERS[] = {
- "tls_aes_256_gcm_sha384", "tls_chacha20_poly1305_sha256",
- "tls_aes_128_gcm_sha256", "tls_aes_128_ccm_8_sha256",
- "tls_aes_128_ccm_sha256"};
+ static constexpr WTF::ASCIILiteral TLS13_CIPHERS[] = {
+ "tls_aes_256_gcm_sha384"_s, "tls_chacha20_poly1305_sha256"_s,
+ "tls_aes_128_gcm_sha256"_s, "tls_aes_128_ccm_8_sha256"_s,
+ "tls_aes_128_ccm_sha256"_s};
const int n = sk_SSL_CIPHER_num(ciphers);
for (int i = 0; i < n; ++i) {
const SSL_CIPHER* cipher = sk_SSL_CIPHER_value(ciphers, i);
- cb(SSL_CIPHER_get_name(cipher));
+ cb(WTF::ASCIILiteral::fromLiteralUnsafe(SSL_CIPHER_get_name(cipher)));
}
for (unsigned i = 0; i < 5; ++i) {
@@ -2562,7 +2587,7 @@ std::optional<uint32_t> SSLPointer::verifyPeerCertificate() const {
return std::nullopt;
}
-const std::string_view SSLPointer::getClientHelloAlpn() const {
+const WTF::StringView SSLPointer::getClientHelloAlpn() const {
if (ssl_ == nullptr) return {};
#ifndef OPENSSL_IS_BORINGSSL
const unsigned char* buf;
@@ -2585,7 +2610,7 @@ const std::string_view SSLPointer::getClientHelloAlpn() const {
#endif
}
-const std::string_view SSLPointer::getClientHelloServerName() const {
+const WTF::StringView SSLPointer::getClientHelloServerName() const {
if (ssl_ == nullptr) return {};
#ifndef OPENSSL_IS_BORINGSSL
const unsigned char* buf;
@@ -2613,15 +2638,14 @@ const std::string_view SSLPointer::getClientHelloServerName() const {
#endif
}
-std::optional<const std::string_view> SSLPointer::GetServerName(
- const SSL* ssl) {
+std::optional<const WTF::String> SSLPointer::GetServerName(const SSL* ssl) {
if (ssl == nullptr) return std::nullopt;
auto res = SSL_get_servername(ssl, TLSEXT_NAMETYPE_host_name);
if (res == nullptr) return std::nullopt;
- return res;
+ return WTF::String::fromUTF8(res);
}
-std::optional<const std::string_view> SSLPointer::getServerName() const {
+std::optional<const WTF::String> SSLPointer::getServerName() const {
if (!ssl_) return std::nullopt;
return GetServerName(get());
}
@@ -2650,22 +2674,28 @@ EVPKeyPointer SSLPointer::getPeerTempKey() const {
return EVPKeyPointer(raw_key);
}
-std::optional<std::string_view> SSLPointer::getCipherName() const {
+std::optional<WTF::StringView> SSLPointer::getCipherName() const {
auto cipher = getCipher();
if (cipher == nullptr) return std::nullopt;
- return SSL_CIPHER_get_name(cipher);
+ const char* name = SSL_CIPHER_get_name(cipher);
+ if (!name) return std::nullopt;
+ return WTF::StringView::fromLatin1(name);
}
-std::optional<std::string_view> SSLPointer::getCipherStandardName() const {
+std::optional<WTF::StringView> SSLPointer::getCipherStandardName() const {
auto cipher = getCipher();
if (cipher == nullptr) return std::nullopt;
- return SSL_CIPHER_standard_name(cipher);
+ const char* name = SSL_CIPHER_standard_name(cipher);
+ if (!name) return std::nullopt;
+ return WTF::StringView::fromLatin1(name);
}
-std::optional<std::string_view> SSLPointer::getCipherVersion() const {
+std::optional<WTF::StringView> SSLPointer::getCipherVersion() const {
auto cipher = getCipher();
if (cipher == nullptr) return std::nullopt;
- return SSL_CIPHER_get_version(cipher);
+ auto version = SSL_CIPHER_get_version(cipher);
+ if (!version) return std::nullopt;
+ return WTF::StringView::fromLatin1(version);
}
SSLCtxPointer::SSLCtxPointer(SSL_CTX* ctx) : ctx_(ctx) {}
@@ -2713,8 +2743,9 @@ bool SSLCtxPointer::setGroups(const char* groups) {
// ============================================================================
-const Cipher Cipher::FromName(std::string_view name) {
- return Cipher(EVP_get_cipherbyname(name.data()));
+const Cipher Cipher::FromName(WTF::StringView name) {
+ auto nameUtf8 = name.utf8();
+ return Cipher(EVP_get_cipherbyname(nameUtf8.data()));
}
const Cipher Cipher::FromNid(int nid) {
@@ -2750,40 +2781,40 @@ int Cipher::getNid() const {
return EVP_CIPHER_nid(cipher_);
}
-std::string_view Cipher::getModeLabel() const {
+WTF::ASCIILiteral Cipher::getModeLabel() const {
if (!cipher_) return {};
switch (getMode()) {
case EVP_CIPH_CCM_MODE:
- return "ccm";
+ return "ccm"_s;
case EVP_CIPH_CFB_MODE:
- return "cfb";
+ return "cfb"_s;
case EVP_CIPH_CBC_MODE:
- return "cbc";
+ return "cbc"_s;
case EVP_CIPH_CTR_MODE:
- return "ctr";
+ return "ctr"_s;
case EVP_CIPH_ECB_MODE:
- return "ecb";
+ return "ecb"_s;
case EVP_CIPH_GCM_MODE:
- return "gcm";
+ return "gcm"_s;
case EVP_CIPH_OCB_MODE:
- return "ocb";
+ return "ocb"_s;
case EVP_CIPH_OFB_MODE:
- return "ofb";
+ return "ofb"_s;
case EVP_CIPH_WRAP_MODE:
- return "wrap";
+ return "wrap"_s;
case EVP_CIPH_XTS_MODE:
- return "xts";
+ return "xts"_s;
case EVP_CIPH_STREAM_CIPHER:
- return "stream";
+ return "stream"_s;
}
- return "{unknown}";
+ return "{unknown}"_s;
}
-std::string_view Cipher::getName() const {
+WTF::String Cipher::getName() const {
if (!cipher_) return {};
// OBJ_nid2sn(EVP_CIPHER_nid(cipher)) is used here instead of
// EVP_CIPHER_name(cipher) for compatibility with BoringSSL.
- return OBJ_nid2sn(getNid());
+ return WTF::String::fromUTF8(OBJ_nid2sn(getNid()));
}
bool Cipher::isSupportedAuthenticatedMode() const {
@@ -3497,15 +3528,15 @@ const std::optional<Rsa::PssParams> Rsa::getPssParams() const {
const RSA_PSS_PARAMS* params = RSA_get0_pss_params(rsa_);
if (params == nullptr) return std::nullopt;
Rsa::PssParams ret{
- .digest = OBJ_nid2ln(NID_sha1),
- .mgf1_digest = OBJ_nid2ln(NID_sha1),
+ .digest = WTF::StringView::fromLatin1(OBJ_nid2ln(NID_sha1)),
+ .mgf1_digest = WTF::StringView::fromLatin1(OBJ_nid2ln(NID_sha1)),
.salt_length = 20,
};
if (params->hashAlgorithm != nullptr) {
const ASN1_OBJECT* hash_obj;
X509_ALGOR_get0(&hash_obj, nullptr, nullptr, params->hashAlgorithm);
- ret.digest = OBJ_nid2ln(OBJ_obj2nid(hash_obj));
+ ret.digest = WTF::StringView::fromLatin1(OBJ_nid2ln(OBJ_obj2nid(hash_obj)));
}
if (params->maskGenAlgorithm != nullptr) {
@@ -3515,7 +3546,8 @@ const std::optional<Rsa::PssParams> Rsa::getPssParams() const {
if (mgf_nid == NID_mgf1) {
const ASN1_OBJECT* mgf1_hash_obj;
X509_ALGOR_get0(&mgf1_hash_obj, nullptr, nullptr, params->maskHash);
- ret.mgf1_digest = OBJ_nid2ln(OBJ_obj2nid(mgf1_hash_obj));
+ ret.mgf1_digest =
+ WTF::StringView::fromLatin1(OBJ_nid2ln(OBJ_obj2nid(mgf1_hash_obj)));
}
}
@@ -3627,8 +3659,8 @@ int Ec::getCurve() const { return EC_GROUP_get_curve_name(getGroup()); }
uint32_t Ec::getDegree() const { return EC_GROUP_get_degree(getGroup()); }
-std::string Ec::getCurveName() const {
- return std::string(OBJ_nid2sn(getCurve()));
+WTF::String Ec::getCurveName() const {
+ return WTF::String::fromUTF8(OBJ_nid2sn(getCurve()));
}
const EC_POINT* Ec::getPublicKey() const { return EC_KEY_get0_public_key(ec_); }
@@ -3891,7 +3923,7 @@ bool X509Name::Iterator::operator!=(const Iterator& other) const {
return loc_ != other.loc_;
}
-std::pair<std::string, std::string> X509Name::Iterator::operator*() const {
+std::pair<WTF::String, WTF::String> X509Name::Iterator::operator*() const {
if (loc_ == name_.total_) return {{}, {}};
X509_NAME_ENTRY* entry = X509_NAME_get_entry(name_, loc_);
@@ -3906,21 +3938,22 @@ std::pair<std::string, std::string> X509Name::Iterator::operator*() const {
}
int nid = OBJ_obj2nid(name);
- std::string name_str;
+ WTF::String name_str;
if (nid != NID_undef) {
- name_str = std::string(OBJ_nid2sn(nid));
+ name_str = WTF::String::fromUTF8(OBJ_nid2sn(nid));
} else {
char buf[80];
OBJ_obj2txt(buf, sizeof(buf), name, 0);
- name_str = std::string(buf);
+ name_str = WTF::String::fromUTF8(buf);
}
unsigned char* value_str;
int value_str_size = ASN1_STRING_to_UTF8(&value_str, value);
return {
- std::move(name_str),
- std::string(reinterpret_cast<const char*>(value_str), value_str_size)};
+ name_str,
+ WTF::String::fromUTF8(std::span(value_str, value_str_size)),
+ };
}
// ============================================================================

View File

@@ -48,7 +48,6 @@ const testsPath = join(cwd, "test");
const spawnTimeout = 5_000;
const testTimeout = 3 * 60_000;
const integrationTimeout = 5 * 60_000;
const napiTimeout = 10 * 60_000;
function getNodeParallelTestTimeout(testPath) {
if (testPath.includes("test-dns")) {
@@ -681,9 +680,6 @@ function getTestTimeout(testPath) {
if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) {
return integrationTimeout;
}
if (/napi/i.test(testPath)) {
return napiTimeout;
}
return testTimeout;
}

View File

@@ -91,10 +91,6 @@ export fn Bun__atexit(function: ExitFn) void {
}
}
pub fn addExitCallback(function: ExitFn) void {
Bun__atexit(function);
}
pub fn runExitCallbacks() void {
for (on_exit_callbacks.items) |callback| {
callback();

View File

@@ -79,11 +79,7 @@ pub fn scan(this: *HTMLScanner, input: []const u8) !void {
try processor.run(this, input);
}
pub fn HTMLProcessor(
comptime T: type,
/// If the visitor should visit html, head, body
comptime visit_document_tags: bool,
) type {
pub fn HTMLProcessor(comptime T: type, comptime visit_head_and_body: bool) type {
return struct {
const TagHandler = struct {
/// CSS selector to match elements
@@ -155,6 +151,12 @@ pub fn HTMLProcessor(
.url_attribute = "href",
.kind = .url,
},
// Catch-all for other links with href
.{
.selector = "link:not([rel~='stylesheet']):not([rel~='modulepreload']):not([rel~='manifest']):not([rel~='icon']):not([rel~='apple-touch-icon'])[href]",
.url_attribute = "href",
.kind = .url,
},
// Images with src
.{
.selector = "img[src]",
@@ -229,7 +231,7 @@ pub fn HTMLProcessor(
var builder = lol.HTMLRewriter.Builder.init();
defer builder.deinit();
var selectors: std.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_document_tags) 3 else 0) = .{};
var selectors: std.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_head_and_body) 2 else 0) = .{};
defer for (selectors.slice()) |selector| {
selector.deinit();
};
@@ -252,23 +254,36 @@ pub fn HTMLProcessor(
);
}
if (visit_document_tags) {
inline for (.{ "body", "head", "html" }, &.{ T.onBodyTag, T.onHeadTag, T.onHtmlTag }) |tag, cb| {
const head_selector = try lol.HTMLSelector.parse(tag);
selectors.appendAssumeCapacity(head_selector);
try builder.addElementContentHandlers(
head_selector,
T,
cb,
this,
void,
null,
null,
void,
null,
null,
);
}
if (visit_head_and_body) {
const head_selector = try lol.HTMLSelector.parse("head");
selectors.appendAssumeCapacity(head_selector);
try builder.addElementContentHandlers(
head_selector,
T,
T.onHeadTag,
this,
void,
null,
null,
void,
null,
null,
);
const body_selector = try lol.HTMLSelector.parse("body");
selectors.appendAssumeCapacity(body_selector);
try builder.addElementContentHandlers(
body_selector,
T,
T.onBodyTag,
this,
void,
null,
null,
void,
null,
null,
);
}
const memory_settings = lol.MemorySettings{

View File

@@ -62,11 +62,7 @@ pub const FileOperation = struct {
}
};
pub const Kind = @typeInfo(Value).Union.tag_type.?;
// TODO: document how and why all variants of this union(enum) are used,
// specifically .move and .copy; the new bundler has to load files in memory
// in order to hash them, so i think it uses .buffer for those
pub const Value = union(enum) {
pub const Value = union(Kind) {
move: FileOperation,
copy: FileOperation,
noop: u0,
@@ -146,6 +142,8 @@ pub const SavedFile = struct {
}
};
pub const Kind = enum { move, copy, noop, buffer, pending, saved };
pub fn initPending(loader: Loader, pending: resolver.Result) OutputFile {
return .{
.loader = loader,
@@ -233,55 +231,99 @@ pub fn init(options: Options) OutputFile {
};
}
pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u8) !void {
pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, longest_common_path: []const u8) ![]const u8 {
switch (f.value) {
.noop => {},
.saved => {
// already written to disk
var rel_path = f.dest_path;
if (f.dest_path.len > longest_common_path.len) {
rel_path = resolve_path.relative(longest_common_path, f.dest_path);
}
return rel_path;
},
.buffer => |value| {
var rel_path = f.dest_path;
if (f.dest_path.len > root_dir_path.len) {
rel_path = resolve_path.relative(root_dir_path, f.dest_path);
if (f.dest_path.len > longest_common_path.len) {
rel_path = resolve_path.relative(longest_common_path, f.dest_path);
if (std.fs.path.dirname(rel_path)) |parent| {
if (parent.len > root_dir_path.len) {
if (parent.len > longest_common_path.len) {
try root_dir.makePath(parent);
}
}
}
var path_buf: bun.PathBuffer = undefined;
_ = try JSC.Node.NodeFS.writeFileWithPathBuffer(&path_buf, .{
.data = .{ .buffer = .{
.buffer = .{
.ptr = @constCast(value.bytes.ptr),
.len = value.bytes.len,
.byte_len = value.bytes.len,
var handled_file_not_found = false;
while (true) {
var path_buf: bun.PathBuffer = undefined;
JSC.Node.NodeFS.writeFileWithPathBuffer(&path_buf, .{
.data = .{ .buffer = .{
.buffer = .{
.ptr = @constCast(value.bytes.ptr),
.len = value.bytes.len,
.byte_len = value.bytes.len,
},
} },
.encoding = .buffer,
.mode = if (f.is_executable) 0o755 else 0o644,
.dirfd = bun.toFD(root_dir.fd),
.file = .{ .path = .{
.string = JSC.PathString.init(rel_path),
} },
}).unwrap() catch |err| switch (err) {
error.FileNotFound, error.ENOENT => {
if (handled_file_not_found) return err;
handled_file_not_found = true;
try root_dir.makePath(
std.fs.path.dirname(rel_path) orelse
return err,
);
continue;
},
} },
.encoding = .buffer,
.mode = if (f.is_executable) 0o755 else 0o644,
.dirfd = bun.toFD(root_dir.fd),
.file = .{ .path = .{
.string = JSC.PathString.init(rel_path),
} },
}).unwrap();
else => return err,
};
break;
}
return rel_path;
},
.move => |value| {
try f.moveTo(root_dir_path, value.pathname, bun.toFD(root_dir.fd));
_ = value;
// var filepath_buf: bun.PathBuffer = undefined;
// filepath_buf[0] = '.';
// filepath_buf[1] = '/';
// const primary = f.dest_path[root_dir_path.len..];
// bun.copy(u8, filepath_buf[2..], primary);
// var rel_path: []const u8 = filepath_buf[0 .. primary.len + 2];
// rel_path = value.pathname;
// try f.moveTo(root_path, @constCast(rel_path), bun.toFD(root_dir.fd));
{
@panic("TODO: Regressed behavior");
}
// return primary;
},
.copy => |value| {
try f.copyTo(root_dir_path, value.pathname, bun.toFD(root_dir.fd));
_ = value;
// rel_path = value.pathname;
// try f.copyTo(root_path, @constCast(rel_path), bun.toFD(root_dir.fd));
{
@panic("TODO: Regressed behavior");
}
},
.noop => {
return f.dest_path;
},
.pending => unreachable,
}
}
pub fn moveTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void {
pub fn moveTo(file: *const OutputFile, _: string, rel_path: []u8, dir: FileDescriptorType) !void {
try bun.C.moveFileZ(file.value.move.dir, bun.sliceTo(&(try std.posix.toPosixPath(file.value.move.getPathname())), 0), dir, bun.sliceTo(&(try std.posix.toPosixPath(rel_path)), 0));
}
pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void {
pub fn copyTo(file: *const OutputFile, _: string, rel_path: []u8, dir: FileDescriptorType) !void {
const file_out = (try dir.asDir().createFile(rel_path, .{}));
const fd_out = file_out.handle;
@@ -325,7 +367,7 @@ pub fn toJS(
JSC.Node.PathOrFileDescriptor{
.path = JSC.Node.PathLike{ .string = bun.PathString.init(globalObject.allocator().dupe(u8, copy.pathname) catch unreachable) },
},
this.loader.toMimeType(&.{owned_pathname orelse ""}),
this.loader.toMimeType(),
globalObject.allocator(),
) catch |err| {
Output.panic("error: Unable to create file blob: \"{s}\"", .{@errorName(err)});
@@ -356,7 +398,7 @@ pub fn toJS(
JSC.Node.PathOrFileDescriptor{
.path = JSC.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
},
this.loader.toMimeType(&.{owned_pathname orelse ""}),
this.loader.toMimeType(),
globalObject.allocator(),
) catch |err| {
Output.panic("error: Unable to create file blob: \"{s}\"", .{@errorName(err)});
@@ -382,10 +424,10 @@ pub fn toJS(
.buffer => |buffer| brk: {
var blob = JSC.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalObject);
if (blob.store) |store| {
store.mime_type = this.loader.toMimeType(&.{owned_pathname orelse ""});
store.mime_type = this.loader.toMimeType();
blob.content_type = store.mime_type.value;
} else {
blob.content_type = this.loader.toMimeType(&.{owned_pathname orelse ""}).value;
blob.content_type = this.loader.toMimeType().value;
}
blob.size = @as(JSC.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
@@ -429,7 +471,7 @@ pub fn toBlob(
JSC.Node.PathOrFileDescriptor{
.path = JSC.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, copy.pathname) catch unreachable) },
},
this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
this.loader.toMimeType(),
allocator,
);
@@ -447,7 +489,7 @@ pub fn toBlob(
JSC.Node.PathOrFileDescriptor{
.path = JSC.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, this.src_path.text) catch unreachable) },
},
this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
this.loader.toMimeType(),
allocator,
);
@@ -463,10 +505,10 @@ pub fn toBlob(
.buffer => |buffer| brk: {
var blob = JSC.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalThis);
if (blob.store) |store| {
store.mime_type = this.loader.toMimeType(&.{ this.dest_path, this.src_path.text });
store.mime_type = this.loader.toMimeType();
blob.content_type = store.mime_type.value;
} else {
blob.content_type = this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }).value;
blob.content_type = this.loader.toMimeType().value;
}
this.value = .{

View File

@@ -12,7 +12,6 @@ const Syscall = bun.sys;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;
const macho = bun.macho;
const w = std.os.windows;
pub const StandaloneModuleGraph = struct {
@@ -114,23 +113,6 @@ pub const StandaloneModuleGraph = struct {
cjs = 2,
};
const Macho = struct {
pub extern "C" fn Bun__getStandaloneModuleGraphMachoLength() ?*align(1) u32;
pub fn getData() ?[]const u8 {
if (Bun__getStandaloneModuleGraphMachoLength()) |length| {
if (length.* < 8) {
return null;
}
const slice_ptr: [*]const u8 = @ptrCast(length);
return slice_ptr[4..][0..length.*];
}
return null;
}
};
pub const File = struct {
name: []const u8 = "",
loader: bun.options.Loader,
@@ -251,7 +233,7 @@ pub const StandaloneModuleGraph = struct {
});
stored.external_source_names = file_names;
stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)), .load_hint = .none };
stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)) };
stored.is_standalone_module_graph = true;
const parsed = stored.new(); // allocate this on the heap
@@ -476,7 +458,7 @@ pub const StandaloneModuleGraph = struct {
windows_hide_console: bool = false,
};
pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions, target: *const CompileTarget) bun.FileDescriptor {
pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions) bun.FileDescriptor {
var buf: bun.PathBuffer = undefined;
var zname: [:0]const u8 = bun.span(bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get temporary file name: {s}", .{@errorName(err)});
@@ -615,127 +597,71 @@ pub const StandaloneModuleGraph = struct {
break :brk fd;
};
switch (target.os) {
.mac => {
const input_result = bun.sys.File.readToEnd(.{ .handle = cloned_executable_fd }, bun.default_allocator);
if (input_result.err) |err| {
Output.prettyErrorln("Error reading standalone module graph: {}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
}
var macho_file = bun.macho.MachoFile.init(bun.default_allocator, input_result.bytes.items, bytes.len) catch |err| {
Output.prettyErrorln("Error initializing standalone module graph: {}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
};
defer macho_file.deinit();
macho_file.writeSection(bytes) catch |err| {
Output.prettyErrorln("Error writing standalone module graph: {}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
};
input_result.bytes.deinit();
var total_byte_count: usize = undefined;
switch (Syscall.setFileOffset(cloned_executable_fd, 0)) {
if (Environment.isWindows) {
total_byte_count = bytes.len + 8 + (Syscall.setFileOffsetToEndWindows(cloned_executable_fd).unwrap() catch |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to seek to end of temporary file\n{}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
});
} else {
const seek_position = @as(u64, @intCast(brk: {
const fstat = switch (Syscall.fstat(cloned_executable_fd)) {
.result => |res| res,
.err => |err| {
Output.prettyErrorln("Error seeking to start of temporary file: {}", .{err});
Output.prettyErrorln("{}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
},
else => {},
}
var file = bun.sys.File{ .handle = cloned_executable_fd };
const writer = file.writer();
const BufferedWriter = std.io.BufferedWriter(512 * 1024, @TypeOf(writer));
var buffered_writer = bun.default_allocator.create(BufferedWriter) catch bun.outOfMemory();
buffered_writer.* = .{
.unbuffered_writer = writer,
};
macho_file.buildAndSign(buffered_writer.writer()) catch |err| {
Output.prettyErrorln("Error writing standalone module graph: {}", .{err});
break :brk @max(fstat.size, 0);
}));
total_byte_count = seek_position + bytes.len + 8;
// From https://man7.org/linux/man-pages/man2/lseek.2.html
//
// lseek() allows the file offset to be set beyond the end of the
// file (but this does not change the size of the file). If data is
// later written at this point, subsequent reads of the data in the
// gap (a "hole") return null bytes ('\0') until data is actually
// written into the gap.
//
switch (Syscall.setFileOffset(cloned_executable_fd, seek_position)) {
.err => |err| {
Output.prettyErrorln(
"{}\nwhile seeking to end of temporary file (pos: {d})",
.{
err,
seek_position,
},
);
cleanup(zname, cloned_executable_fd);
Global.exit(1);
};
buffered_writer.flush() catch |err| {
Output.prettyErrorln("Error flushing standalone module graph: {}", .{err});
},
else => {},
}
}
var remain = bytes;
while (remain.len > 0) {
switch (Syscall.write(cloned_executable_fd, bytes)) {
.result => |written| remain = remain[written..],
.err => |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to write to temporary file\n{}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
};
if (comptime !Environment.isWindows) {
_ = bun.C.fchmod(cloned_executable_fd.int(), 0o777);
}
return cloned_executable_fd;
},
else => {
var total_byte_count: usize = undefined;
if (Environment.isWindows) {
total_byte_count = bytes.len + 8 + (Syscall.setFileOffsetToEndWindows(cloned_executable_fd).unwrap() catch |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to seek to end of temporary file\n{}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
});
} else {
const seek_position = @as(u64, @intCast(brk: {
const fstat = switch (Syscall.fstat(cloned_executable_fd)) {
.result => |res| res,
.err => |err| {
Output.prettyErrorln("{}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
},
};
},
}
}
break :brk @max(fstat.size, 0);
}));
total_byte_count = seek_position + bytes.len + 8;
// From https://man7.org/linux/man-pages/man2/lseek.2.html
//
// lseek() allows the file offset to be set beyond the end of the
// file (but this does not change the size of the file). If data is
// later written at this point, subsequent reads of the data in the
// gap (a "hole") return null bytes ('\0') until data is actually
// written into the gap.
//
switch (Syscall.setFileOffset(cloned_executable_fd, seek_position)) {
.err => |err| {
Output.prettyErrorln(
"{}\nwhile seeking to end of temporary file (pos: {d})",
.{
err,
seek_position,
},
);
cleanup(zname, cloned_executable_fd);
Global.exit(1);
},
else => {},
}
}
var remain = bytes;
while (remain.len > 0) {
switch (Syscall.write(cloned_executable_fd, bytes)) {
.result => |written| remain = remain[written..],
.err => |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to write to temporary file\n{}", .{err});
cleanup(zname, cloned_executable_fd);
Global.exit(1);
},
}
}
// the final 8 bytes in the file are the length of the module graph with padding, excluding the trailer and offsets
_ = Syscall.write(cloned_executable_fd, std.mem.asBytes(&total_byte_count));
if (comptime !Environment.isWindows) {
_ = bun.C.fchmod(cloned_executable_fd.int(), 0o777);
}
return cloned_executable_fd;
},
// the final 8 bytes in the file are the length of the module graph with padding, excluding the trailer and offsets
_ = Syscall.write(cloned_executable_fd, std.mem.asBytes(&total_byte_count));
if (comptime !Environment.isWindows) {
_ = bun.C.fchmod(cloned_executable_fd.int(), 0o777);
}
if (Environment.isWindows and inject_options.windows_hide_console) {
@@ -793,7 +719,6 @@ pub const StandaloneModuleGraph = struct {
Global.exit(1);
},
.{ .windows_hide_console = windows_hide_console },
target,
);
fd.assertKind(.system);
@@ -836,6 +761,29 @@ pub const StandaloneModuleGraph = struct {
Global.exit(1);
};
if (comptime Environment.isMac) {
if (target.os == .mac) {
var signer = std.process.Child.init(
&.{
"codesign",
"--remove-signature",
temp_location,
},
bun.default_allocator,
);
if (bun.logger.Log.default_log_level.atLeast(.verbose)) {
signer.stdout_behavior = .Inherit;
signer.stderr_behavior = .Inherit;
signer.stdin_behavior = .Inherit;
} else {
signer.stdout_behavior = .Ignore;
signer.stderr_behavior = .Ignore;
signer.stdin_behavior = .Ignore;
}
_ = signer.spawnAndWait() catch {};
}
}
bun.C.moveFileZWithHandle(
fd,
bun.FD.cwd(),
@@ -857,22 +805,6 @@ pub const StandaloneModuleGraph = struct {
}
pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {
if (comptime Environment.isMac) {
const macho_bytes = Macho.getData() orelse return null;
if (macho_bytes.len < @sizeOf(Offsets) + trailer.len) {
Output.debugWarn("bun standalone module graph is too small to be valid", .{});
return null;
}
const macho_bytes_slice = macho_bytes[macho_bytes.len - @sizeOf(Offsets) - trailer.len ..];
const trailer_bytes = macho_bytes[macho_bytes.len - trailer.len ..][0..trailer.len];
if (!bun.strings.eqlComptime(trailer_bytes, trailer)) {
Output.debugWarn("bun standalone module graph has invalid trailer", .{});
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, macho_bytes_slice).*;
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(macho_bytes), offsets);
}
// Do not invoke libuv here.
const self_exe = openSelf() catch return null;
defer _ = Syscall.close(self_exe);

View File

@@ -1,11 +1,12 @@
//! Rope-like data structure for joining many small strings into one big string.
//! Implemented as a linked list of potentially-owned slices and a length.
const StringJoiner = @This();
const std = @import("std");
const default_allocator = bun.default_allocator;
const bun = @import("root").bun;
const string = bun.string;
const Allocator = std.mem.Allocator;
const NullableAllocator = bun.NullableAllocator;
const StringJoiner = @This();
const assert = bun.assert;
/// Temporary allocator used for nodes and duplicated strings.
@@ -153,7 +154,7 @@ pub fn ensureNewlineAtEnd(this: *StringJoiner) void {
}
}
pub fn contains(this: *const StringJoiner, slice: []const u8) bool {
pub fn contains(this: *const StringJoiner, slice: string) bool {
var el = this.head;
while (el) |node| {
el = node.next;

View File

@@ -1,184 +0,0 @@
//! AllocationScope wraps another allocator, providing leak and invalid free assertions.
//! It also allows measuring how much memory a scope has allocated.
const AllocationScope = @This();
pub const enabled = bun.Environment.isDebug;
parent: Allocator,
state: if (enabled) struct {
mutex: bun.Mutex,
total_memory_allocated: usize,
allocations: std.AutoHashMapUnmanaged([*]const u8, Allocation),
frees: std.AutoArrayHashMapUnmanaged([*]const u8, Free),
/// Once `frees` fills up, entries are overwritten from start to end.
free_overwrite_index: std.math.IntFittingRange(0, max_free_tracking + 1),
} else void,
pub const max_free_tracking = 2048 - 1;
pub const Allocation = struct {
allocated_at: StoredTrace,
len: usize,
};
pub const Free = struct {
allocated_at: StoredTrace,
freed_at: StoredTrace,
};
pub fn init(parent: Allocator) AllocationScope {
return if (enabled)
.{
.parent = parent,
.state = .{
.total_memory_allocated = 0,
.allocations = .empty,
.frees = .empty,
.free_overwrite_index = 0,
.mutex = .{},
},
}
else
.{ .parent = parent, .state = {} };
}
pub fn deinit(scope: *AllocationScope) void {
if (enabled) {
scope.state.mutex.lock();
defer scope.state.allocations.deinit(scope.parent);
const count = scope.state.allocations.count();
if (count == 0) return;
Output.errGeneric("Allocation scope leaked {d} allocations ({})", .{
count,
bun.fmt.size(scope.state.total_memory_allocated, .{}),
});
var it = scope.state.allocations.iterator();
var n: usize = 0;
while (it.next()) |entry| {
Output.prettyErrorln("- {any}, len {d}, at:", .{ entry.key_ptr.*, entry.value_ptr.len });
bun.crash_handler.dumpStackTrace(entry.value_ptr.allocated_at.trace());
n += 1;
if (n >= 8) {
Output.prettyErrorln("(only showing first 10 leaks)", .{});
break;
}
}
Output.panic("Allocation scope leaked {}", .{bun.fmt.size(scope.state.total_memory_allocated, .{})});
}
}
pub fn allocator(scope: *AllocationScope) Allocator {
return if (enabled) .{ .ptr = scope, .vtable = &vtable } else scope.parent;
}
const vtable: Allocator.VTable = .{
.alloc = alloc,
.resize = resize,
.free = free,
};
fn alloc(ctx: *anyopaque, len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8 {
const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
scope.state.mutex.lock();
defer scope.state.mutex.unlock();
scope.state.allocations.ensureUnusedCapacity(scope.parent, 1) catch
return null;
const result = scope.parent.vtable.alloc(scope.parent.ptr, len, ptr_align, ret_addr) orelse
return null;
const trace = StoredTrace.capture(ret_addr);
scope.state.allocations.putAssumeCapacityNoClobber(result, .{
.allocated_at = trace,
.len = len,
});
scope.state.total_memory_allocated += len;
return result;
}
fn resize(ctx: *anyopaque, buf: []u8, buf_align: u8, new_len: usize, ret_addr: usize) bool {
const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
return scope.parent.vtable.resize(scope.parent.ptr, buf, buf_align, new_len, ret_addr);
}
/// Allocator vtable `free`: removes the pointer from the live-allocation
/// table, records where it was freed (for later double-free diagnostics),
/// then forwards the free to the parent allocator. Panics on a pointer this
/// scope does not own.
fn free(ctx: *anyopaque, buf: []u8, buf_align: u8, ret_addr: usize) void {
    const scope: *AllocationScope = @ptrCast(@alignCast(ctx));
    scope.state.mutex.lock();
    defer scope.state.mutex.unlock();
    var invalid = false;
    if (scope.state.allocations.fetchRemove(buf.ptr)) |entry| {
        // Known live allocation: un-account its bytes and remember both the
        // allocation trace and the trace of this free.
        scope.state.total_memory_allocated -= entry.value.len;
        free_entry: {
            scope.state.frees.put(scope.parent, buf.ptr, .{
                .allocated_at = entry.value.allocated_at,
                .freed_at = StoredTrace.capture(ret_addr),
            }) catch break :free_entry; // best-effort: failure to track is not fatal
            // Store a limited amount of free entries; once the cap is hit,
            // overwrite the oldest slots in rotating order.
            if (scope.state.frees.count() >= max_free_tracking) {
                const i = scope.state.free_overwrite_index;
                scope.state.free_overwrite_index = @mod(scope.state.free_overwrite_index + 1, max_free_tracking);
                scope.state.frees.swapRemoveAt(i);
            }
        }
    } else {
        // Pointer was never allocated by this scope (or was already freed).
        // If we still have the record of a prior free, dump both stack traces
        // to make the double-free easy to locate.
        invalid = true;
        bun.Output.errGeneric("Invalid free, pointer {any}, len {d}", .{ buf.ptr, buf.len });
        if (scope.state.frees.get(buf.ptr)) |free_entry_const| {
            var free_entry = free_entry_const;
            bun.Output.printErrorln("Pointer allocated here:", .{});
            bun.crash_handler.dumpStackTrace(free_entry.allocated_at.trace());
            bun.Output.printErrorln("Pointer first freed here:", .{});
            bun.crash_handler.dumpStackTrace(free_entry.freed_at.trace());
        }
        // do not panic because address sanitizer will catch this case better.
        // the log message is in case there is a situation where address
        // sanitizer does not catch the invalid free.
    }
    scope.parent.vtable.free(scope.parent.ptr, buf, buf_align, ret_addr);
    // If asan did not catch the free, panic now.
    if (invalid) @panic("Invalid free");
}
/// Debug assertion: panics unless `ptr` is currently tracked as a live
/// allocation of this scope. Accepts any pointer type; empty slices are
/// trivially accepted. No-op when scope tracking is disabled.
pub fn assertOwned(scope: *AllocationScope, ptr: anytype) void {
    if (!enabled) return;
    const raw: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
        .slice => if (ptr.len > 0) ptr.ptr else return,
        .c, .one, .many => ptr,
    });
    scope.state.mutex.lock();
    defer scope.state.mutex.unlock();
    if (scope.state.allocations.getPtr(raw) == null)
        @panic("this pointer was not owned by the allocation scope");
}
/// Debug assertion: panics if `ptr` IS currently tracked as a live
/// allocation of this scope. Accepts any pointer type; empty slices are
/// trivially accepted. No-op when scope tracking is disabled.
///
/// Fix: the `@panic` was previously placed after the ownership `if`, making
/// it unconditional — the assertion fired even for pointers the scope did
/// not own. It now panics only when the pointer is found in the table,
/// after dumping the allocation trace for debugging.
pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void {
    if (!enabled) return;
    const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) {
        .c, .one, .many => ptr,
        .slice => if (ptr.len > 0) ptr.ptr else return,
    });
    scope.state.mutex.lock();
    defer scope.state.mutex.unlock();
    if (scope.state.allocations.getPtr(cast_ptr)) |owned| {
        Output.debugWarn("Pointer allocated here:");
        bun.crash_handler.dumpStackTrace(owned.allocated_at.trace());
        @panic("this pointer was owned by the allocation scope when it was not supposed to be");
    }
}
/// If `a` is an AllocationScope-backed Allocator (identified by its vtable),
/// return the scope; otherwise null. Always null when tracking is disabled.
pub inline fn downcast(a: Allocator) ?*AllocationScope {
    if (!enabled) return null;
    if (a.vtable != &vtable) return null;
    return @ptrCast(@alignCast(a.ptr));
}
const std = @import("std");
const Allocator = std.mem.Allocator;
const bun = @import("root").bun;
const Output = bun.Output;
const StoredTrace = bun.crash_handler.StoredTrace;

View File

@@ -95,10 +95,6 @@ pub const Features = struct {
pub var fetch: usize = 0;
pub var git_dependencies: usize = 0;
pub var html_rewriter: usize = 0;
/// TCP server from `Bun.listen`
pub var tcp_server: usize = 0;
/// TLS server from `Bun.listen`
pub var tls_server: usize = 0;
pub var http_server: usize = 0;
pub var https_server: usize = 0;
/// Set right before JSC::initialize is called

View File

@@ -800,6 +800,9 @@ pub const Api = struct {
/// import_source
import_source: []const u8,
/// react_fast_refresh
react_fast_refresh: bool = false,
pub fn decode(reader: anytype) anyerror!Jsx {
var this = std.mem.zeroes(Jsx);
@@ -808,6 +811,7 @@ pub const Api = struct {
this.fragment = try reader.readValue([]const u8);
this.development = try reader.readValue(bool);
this.import_source = try reader.readValue([]const u8);
this.react_fast_refresh = try reader.readValue(bool);
return this;
}
@@ -817,6 +821,7 @@ pub const Api = struct {
try writer.writeValue(@TypeOf(this.fragment), this.fragment);
try writer.writeInt(@as(u8, @intFromBool(this.development)));
try writer.writeValue(@TypeOf(this.import_source), this.import_source);
try writer.writeInt(@as(u8, @intFromBool(this.react_fast_refresh)));
}
};
@@ -1704,8 +1709,6 @@ pub const Api = struct {
serve_env_prefix: ?[]const u8 = null,
serve_splitting: bool = false,
serve_public_path: ?[]const u8 = null,
serve_hmr: ?bool = null,
serve_define: ?StringMap = null,
bunfig_path: []const u8,

View File

@@ -174,7 +174,7 @@ pub fn BabyList(comptime Type: type) type {
bun.assert(this.cap >= this.len);
}
pub fn initCapacity(allocator: std.mem.Allocator, len: usize) std.mem.Allocator.Error!ListType {
pub fn initCapacity(allocator: std.mem.Allocator, len: usize) !ListType {
return initWithBuffer(try allocator.alloc(Type, len));
}

View File

@@ -1,6 +0,0 @@
// @ts-ignore
import { fn, t } from "../codegen/bindgen-lib";
// Bindgen declaration: a zero-argument native function returning a usize.
// NOTE(review): presumably exposes a deinit counter so tests can detect
// leaked bindgen-managed objects — confirm against the native implementation.
export const getDeinitCountForTesting = fn({
  args: {},
  ret: t.usize,
});

File diff suppressed because it is too large Load Diff

View File

@@ -423,7 +423,7 @@ pub const Style = union(enum) {
pub fn fromJS(value: JSValue, global: *JSC.JSGlobalObject) !Style {
if (value.isString()) {
const bun_string = try value.toBunString(global);
const bun_string = try value.toBunString2(global);
var sfa = std.heap.stackFallback(4096, bun.default_allocator);
const utf8 = bun_string.toUTF8(sfa.get());
defer utf8.deinit();
@@ -1166,7 +1166,7 @@ pub const JSFrameworkRouter = struct {
pub fn match(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, callframe: *JSC.CallFrame) !JSValue {
const path_js = callframe.argumentsAsArray(1)[0];
const path_str = try path_js.toBunString(global);
const path_str = try path_js.toBunString2(global);
defer path_str.deref();
const path_slice = path_str.toSlice(bun.default_allocator);
defer path_slice.deinit();

View File

@@ -11,8 +11,6 @@ interface Config {
separateSSRGraph?: true;
// Client
/** Bun version */
bun: string;
/** Dev Server's `configuration_hash_key` */
version: string;
/** If available, this is the Id of `react-refresh/runtime` */

View File

@@ -93,12 +93,14 @@ pub const StringRefList = struct {
pub const SplitBundlerOptions = struct {
plugin: ?*Plugin = null,
all: BuildConfigSubset = .{},
client: BuildConfigSubset = .{},
server: BuildConfigSubset = .{},
ssr: BuildConfigSubset = .{},
pub const empty: SplitBundlerOptions = .{
.plugin = null,
.all = .{},
.client = .{},
.server = .{},
.ssr = .{},
@@ -154,7 +156,12 @@ const BuildConfigSubset = struct {
drop: bun.StringArrayHashMapUnmanaged(void) = .{},
env: bun.Schema.Api.DotEnvBehavior = ._none,
env_prefix: ?[]const u8 = null,
define: bun.Schema.Api.StringMap = .{ .keys = &.{}, .values = &.{} },
pub fn loadFromJs(config: *BuildConfigSubset, value: JSValue, arena: Allocator) !void {
_ = config; // autofix
_ = value; // autofix
_ = arena; // autofix
}
};
/// A "Framework" in our eyes is simply set of bundler options that a framework
@@ -361,7 +368,7 @@ pub const Framework = struct {
arena: Allocator,
) !Framework {
if (opts.isString()) {
const str = try opts.toBunString(global);
const str = try opts.toBunString2(global);
defer str.deref();
// Deprecated
@@ -401,7 +408,7 @@ pub const Framework = struct {
return global.throwInvalidArguments("'framework.reactFastRefresh' is missing 'importSource'", .{});
};
const str = try prop.toBunString(global);
const str = try prop.toBunString2(global);
defer str.deref();
break :brk .{
@@ -587,7 +594,7 @@ pub const Framework = struct {
pub fn initTranspiler(
framework: *Framework,
arena: std.mem.Allocator,
allocator: std.mem.Allocator,
log: *bun.logger.Log,
mode: Mode,
renderer: Graph,
@@ -595,7 +602,7 @@ pub const Framework = struct {
bundler_options: *const BuildConfigSubset,
) !void {
out.* = try bun.Transpiler.init(
arena,
allocator, // TODO: this is likely a memory leak
log,
std.mem.zeroes(bun.Schema.Api.TransformOptions),
null,
@@ -627,7 +634,7 @@ pub const Framework = struct {
out.options.react_fast_refresh = mode == .development and renderer == .client and framework.react_fast_refresh != null;
out.options.server_components = framework.server_components != null;
out.options.conditions = try bun.options.ESMConditions.init(arena, out.options.target.defaultConditions());
out.options.conditions = try bun.options.ESMConditions.init(allocator, out.options.target.defaultConditions());
if (renderer == .server and framework.server_components != null) {
try out.options.conditions.appendSlice(&.{"react-server"});
}
@@ -635,9 +642,6 @@ pub const Framework = struct {
// Support `esm-env` package using this condition.
try out.options.conditions.appendSlice(&.{"development"});
}
if (bundler_options.conditions.count() > 0) {
try out.options.conditions.appendSlice(bundler_options.conditions.keys());
}
out.options.production = mode != .development;
out.options.tree_shaking = mode != .development;
@@ -646,13 +650,10 @@ pub const Framework = struct {
out.options.minify_whitespace = mode != .development;
out.options.css_chunking = true;
out.options.framework = framework;
if (bundler_options.ignoreDCEAnnotations) |ignore|
out.options.ignore_dce_annotations = ignore;
out.options.source_map = switch (mode) {
// Source maps must always be external, as DevServer special cases
// the linking and part of the generation of these. It also relies
// on source maps always being enabled.
// Source maps must always be linked, as DevServer special cases the
// linking and part of the generation of these.
.development => .external,
// TODO: follow user configuration
else => .none,
@@ -668,25 +669,11 @@ pub const Framework = struct {
out.options.jsx.development = mode == .development;
try addImportMetaDefines(arena, out.options.define, mode, switch (renderer) {
try addImportMetaDefines(allocator, out.options.define, mode, switch (renderer) {
.client => .client,
.server, .ssr => .server,
});
if ((bundler_options.define.keys.len + bundler_options.drop.count()) > 0) {
for (bundler_options.define.keys, bundler_options.define.values) |k, v| {
const parsed = try bun.options.Define.Data.parse(k, v, false, false, log, arena);
try out.options.define.insert(arena, k, parsed);
}
for (bundler_options.drop.keys()) |drop_item| {
if (drop_item.len > 0) {
const parsed = try bun.options.Define.Data.parse(drop_item, "", true, true, log, arena);
try out.options.define.insert(arena, drop_item, parsed);
}
}
}
if (mode != .development) {
// Hide information about the source repository, at the cost of debugging quality.
out.options.entry_naming = "_bun/[hash].[ext]";
@@ -709,7 +696,7 @@ fn getOptionalString(
return null;
if (value == .undefined or value == .null)
return null;
const str = try value.toBunString(global);
const str = try value.toBunString2(global);
return allocations.track(str.toUTF8(arena));
}
@@ -807,6 +794,14 @@ pub fn addImportMetaDefines(
"import.meta.env.STATIC",
Define.Data.initBoolean(mode == .production_static),
);
if (mode != .development) {
try define.insert(
allocator,
"import.meta.hot",
Define.Data.initBoolean(false),
);
}
}
pub const server_virtual_source: bun.logger.Source = .{

View File

@@ -54,7 +54,7 @@ export function renderToHtml(
});
// The root is this "Root" component that unwraps the streamed promise
// with `use`, and then returning the parsed React component for the UI.
const Root: any = () => React.use(promise);
const Root = () => React.use(promise);
// `renderToPipeableStream` is what actually generates HTML.
// Here is where React is told what script tags to inject.

View File

@@ -562,24 +562,6 @@ export class DraculaSyntaxHighlighter {
return this.buildHtmlElement("pre", { "class": classAttr }, result);
}
/**
 * Render the input as syntax-highlighted HTML on a single line.
 * Tokenizes the whole source, skips newline tokens, wraps each token that
 * carries a CSS class, and HTML-escapes unclassified tokens verbatim.
 */
public highlightLine() {
  let lineContent = "";
  for (const token of this.tokenize()) {
    if (token.type === TokenType.Newline) {
      continue;
    }
    if (token.tokenClass) {
      lineContent += this.wrap(token.value, token.tokenClass);
    } else {
      lineContent += this.escapeHtml(token.value);
    }
  }
  return lineContent;
}
private escapeHtml(str: string): string {
return str
.replace(/&/g, "&amp;")
@@ -812,7 +794,3 @@ export class DraculaSyntaxHighlighter {
return false;
}
}
/** Convenience wrapper: highlight `code` with the Dracula theme as one HTML line. */
export function syntaxHighlight(code: string) {
  return new DraculaSyntaxHighlighter(code).highlightLine();
}

View File

@@ -1,103 +0,0 @@
import { td, te } from "../shared";
/**
 * Sequential little-endian binary reader over a DataView, advancing an
 * internal byte cursor with each read.
 * NOTE(review): `stringWithLength` and `rest` slice `view.buffer` at the
 * cursor directly — this assumes the view starts at byteOffset 0 of its
 * buffer; confirm callers never pass an offset view.
 */
export class DataViewReader {
  view: DataView<ArrayBuffer>;
  cursor: number; // byte offset of the next read

  constructor(view: DataView<ArrayBuffer>, cursor: number = 0) {
    this.view = view;
    this.cursor = cursor;
  }

  /** Read an unsigned 32-bit little-endian integer; advances 4 bytes. */
  u32() {
    const value = this.view.getUint32(this.cursor, true);
    this.cursor += 4;
    return value;
  }

  /** Read a signed 32-bit little-endian integer; advances 4 bytes. */
  i32() {
    const value = this.view.getInt32(this.cursor, true);
    this.cursor += 4;
    return value;
  }

  /** Read an unsigned 16-bit little-endian integer; advances 2 bytes. */
  u16() {
    const value = this.view.getUint16(this.cursor, true);
    this.cursor += 2;
    return value;
  }

  /** Read a single unsigned byte; advances 1 byte. */
  u8() {
    const value = this.view.getUint8(this.cursor);
    this.cursor += 1;
    return value;
  }

  /**
   * Decode `byteLength` bytes at the cursor as text and advance past them.
   * NOTE(review): `td` is presumably a shared TextDecoder — confirm in ../shared.
   */
  stringWithLength(byteLength: number) {
    const str = td.decode(this.view.buffer.slice(this.cursor, this.cursor + byteLength));
    this.cursor += byteLength;
    return str;
  }

  /** Read a u32 byte-length prefix, then that many bytes as a string. */
  string32() {
    return this.stringWithLength(this.u32());
  }

  /** True while the cursor has not reached the end of the view. */
  hasMoreData() {
    return this.cursor < this.view.byteLength;
  }

  /** All remaining bytes from the cursor to the end, as a new ArrayBuffer. */
  rest() {
    return this.view.buffer.slice(this.cursor);
  }
}
/**
 * Sequential little-endian binary writer into a fixed-capacity ArrayBuffer,
 * advancing an internal byte cursor with each write. The buffer is NOT
 * grown: callers must size `capacity` for the full payload up front, or
 * `string` will throw when the encoder runs out of room.
 */
export class DataViewWriter {
  view: DataView<ArrayBuffer>;
  uint8ArrayView: Uint8Array; // same buffer as `view`, used by encodeInto
  cursor: number; // byte offset of the next write
  capacity: number;

  /** Allocate a fresh buffer of `capacity` bytes and wrap it in a writer. */
  static initCapacity(capacity: number) {
    const view = new DataView(new ArrayBuffer(capacity));
    return new DataViewWriter(view, 0, capacity);
  }

  constructor(view: DataView<ArrayBuffer>, cursor: number, capacity: number) {
    this.view = view;
    this.cursor = cursor;
    this.capacity = capacity;
    this.uint8ArrayView = new Uint8Array(view.buffer);
  }

  /** Write a single unsigned byte; advances 1 byte. */
  u8(value: number) {
    this.view.setUint8(this.cursor, value);
    this.cursor += 1;
  }

  /** Write an unsigned 32-bit little-endian integer; advances 4 bytes. */
  u32(value: number) {
    this.view.setUint32(this.cursor, value, true);
    this.cursor += 4;
  }

  /** Write a signed 32-bit little-endian integer; advances 4 bytes. */
  i32(value: number) {
    this.view.setInt32(this.cursor, value, true);
    this.cursor += 4;
  }

  /**
   * UTF-8-encode `value` at the cursor (no length prefix); advances by the
   * encoded byte count. Throws if the encoder could not consume the whole
   * string (i.e. the buffer is too small).
   * NOTE(review): `te` is presumably a shared TextEncoder — confirm in ../shared.
   */
  string(value: string) {
    if (value.length === 0) return;
    const encodeResult = te.encodeInto(value, this.uint8ArrayView.subarray(this.cursor));
    if (encodeResult.read !== value.length) {
      throw new Error("Failed to encode string");
    }
    this.cursor += encodeResult.written;
  }

  /**
   * Write `value` prefixed by its encoded byte length: reserves a u32 slot,
   * encodes the string, then back-patches the slot with the byte count
   * (cursor delta minus the 4-byte prefix itself).
   */
  stringWithLength(value: string) {
    const cursor = this.cursor;
    this.u32(0);
    this.string(value);
    this.view.setUint32(cursor, this.cursor - cursor - 4, true);
  }
}

View File

@@ -1,6 +1,6 @@
// This implements error deserialization from the WebSocket protocol
import { BundlerMessageLevel } from "../enums";
import { DataViewReader } from "./data-view";
import { DataViewReader } from "./reader";
export interface DeserializedFailure {
// If not specified, it is a client-side error.

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#FFF"><path d="m256-200-56-56 224-224-224-224 56-56 224 224 224-224 56 56-224 224 224 224-56 56-224-224-224 224Z"/></svg>

Before

Width:  |  Height:  |  Size: 219 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#ff5858"><path d="M647-440H160v-80h487L423-744l57-56 320 320-320 320-57-56 224-224Z"/></svg>

Before

Width:  |  Height:  |  Size: 190 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#ff5858"><path d="m313-440 224 224-57 56-320-320 320-320 57 56-224 224h487v80H313Z"/></svg>

Before

Width:  |  Height:  |  Size: 189 B

View File

@@ -3,8 +3,6 @@
* the user's application causes no issue. This sheet is used to
* style error popups and other elements provided by DevServer.
*/
/* Reset and base styles */
* {
box-sizing: border-box;
margin: 0;
@@ -12,43 +10,18 @@
}
.root {
all: initial;
/* TODO: revive light theme error modal */
/* color-scheme: light dark;
color-scheme: light dark;
--modal-bg: light-dark(#efefef, #202020);
--modal-text: light-dark(#0a0a0a, #fafafa);
--modal-text-faded: light-dark(#0a0a0a88, #fafafa88);
--item-bg: light-dark(#d4d4d4, #0f0f0f);
--item-bg-main: light-dark(#d4d4d4, #262626);
--item-bg-hover: light-dark(#cccccc, #171717);
--red: #ff5858;
--log-error: light-dark(#dc0000, #ff5858);
--log-warn: light-dark(#eab308, #fbbf24);
--log-note: light-dark(#008ae6, #22d3ee);
--log-colon: light-dark(#888, #888); */
--modal-bg: #202020;
--modal-text: #fafafa;
--modal-text-faded: #fafafa88;
--item-bg: #0f0f0f;
--item-bg-main: #262626;
--log-error: #ff5858;
--log-warn: #fbbf24;
--log-note: #22d3ee;
--log-colon: #888;
--red: #ff5858;
--red-faded: #ff58582F;
--error-bg: #ff58582F;
--warn-bg: #eab3082F;
--syntax-comment: #6272a4;
--syntax-cyan: #8be9fd;
--syntax-green: #50fa7b;
--syntax-orange: #ffb86c;
--syntax-pink: #ff79c6;
--syntax-purple: #bd93f9;
--syntax-red: #ff5555;
--syntax-yellow: #f1fa8c;
--log-colon: light-dark(#888, #888);
font-family:
system-ui,
@@ -73,30 +46,11 @@
}
code,
.message,
.function-name,
.file-name,
.code {
.message {
font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
}
pre, code {
font: unset;
}
button {
appearance: none;
background-color: transparent;
border: none;
font-size: unset;
cursor: pointer;
}
/* Helper classes */
.flex { flex: 1 }
.muted { color: var(--modal-text-faded) }
/* Modal */
.modal {
color: var(--modal-text);
background-color: var(--modal-bg);
@@ -110,169 +64,109 @@ button {
0 2px 32px #0003;
}
footer {
display: flex;
color: var(--modal-text-faded);
margin: 1rem;
}
/* Navigation bar */
nav {
display: flex;
margin: 1rem;
align-items: center;
color: var(--modal-text-faded);
}
.tab-button {
width: 24px;
height: 24px;
border-radius: 4px;
background-color: var(--red-faded);
}
.tab-button[disabled] {
opacity: 0.5;
}
.tab-button.left {
border-top-right-radius: 0;
border-bottom-right-radius: 0;
margin-right: 2px;
}
.tab-button.right {
border-top-left-radius: 0;
border-bottom-left-radius: 0;
margin-right: 0.5rem;
}
.dismiss-all {
width: 24px;
height: 24px;
}
/* Header / Title */
header {
margin: 1rem;
display: flex;
align-items: center;
}
.title {
margin: 1rem 1rem;
color: var(--red);
font-size: 2rem;
font-weight: bold;
}
/* Runtime Error */
.r-error {
background-color: var(--item-bg);
}
.message-desc {
padding: 0rem 1.5rem;
}
.message-desc.error {
padding: 1rem 1.5rem;
background-color: var(--error-bg);
}
.message-desc.warn{
padding: 1rem 1.5rem;
background-color: var(--warn-bg);
}
.r-error .name {
color: var(--red);
font-weight: bold;
footer {
color: var(--modal-text-faded);
margin: 1rem;
}
.file-name {
color: var(--modal-text);
font-weight: bold;
}
.r-code-wrap {
background-color: var(--item-bg-main);
--color: var(--log-error);
pre {
font: unset;
}
/* Bundler messages */
.b-group {
.message-group {
display: flex;
flex-direction: column;
background-color: var(--item-bg);
}
.b-group + .b-group {
border-top: 1px solid var(--modal-text-faded);
/* this is a <button> */
.file-name {
appearance: none;
background-color: transparent;
border: none;
font-size: unset;
font-weight: bold;
padding: 0.5rem 1rem;
text-align: left;
/* cursor: pointer; */
}
.b-group .code {
padding-bottom: 1rem;
/* .file-name:hover,
.file-name:focus-visible {
background-color: var(--item-bg-hover);
}
.file-name::after {
color: var(--modal-text-faded);
font-size: 70%;
}
.file-name:hover::after,
.file-name:focus-visible {
content: " (click to open in editor)";
} */
.message {
margin: 1rem;
margin-bottom: 0;
}
button + .message {
margin-top: 0.5rem;
}
.message-text > span {
color: var(--color);
}
.message-text:last-child {
margin-bottom: 1rem;
}
.log-error {
--color: var(--log-error);
font-weight: bold;
}
.log-warn { --color: var(--log-warn); }
.log-note { --color: var(--log-note); }
.log-colon { --color: var(--log-colon); }
.log-label { color: var(--color); }
.message-desc.note + .code {
padding-top: 0.75rem;
.log-warn {
--color: var(--log-warn);
}
/* Trace Frames */
.trace-frame {
padding: 0.7rem 1.5rem;
}
.trace-frame + .trace-frame {
margin-top: -0.5rem;
}
.function-name {
color: var(--syntax-cyan);
font-style: italic;
.log-note {
--color: var(--log-note);
}
/* Code View + Message Highlighting (Underline) */
.code {
.log-colon {
--color: var(--log-colon);
}
.code-line {
display: flex;
padding-top: 1rem;
margin: 0.5rem 0;
}
.code .gutter {
display: flex;
flex-direction: column;
padding: 0.3rem 0;
padding-left: 0.5rem;
text-align: right;
margin-right: 1rem;
padding-right: 0.5rem;
user-select: none;
pointer-events: none;
.line-num {
color: var(--modal-text-faded);
border-right: 1px solid var(--modal-text-faded);
border-width: 1.5px;
}
.code .gutter div {
white-space: pre;
}
.code .gutter .highlight-gap {
font-size: 8px;
}
.code .view {
padding: 0.3rem 0;
margin-right: 10px;
}
.highlight-wrap {
color: transparent;
user-select: none;
-webkit-user-select: none;
pointer-events: none;
transform: translateY(-16px);
margin-bottom: -10px;
}
.highlight-wrap:last-child {
margin-bottom: 0;
position: absolute;
}
.highlight-gap {
height: 10px;
transform: translateY(-20px);
margin-bottom: -5px;
}
.highlight-wrap .line {
margin-left: 10px;
text-decoration: underline;
text-decoration-style: wavy;
text-decoration-color: var(--color);
@@ -284,24 +178,3 @@ header {
font-weight: bold;
}
}
/* Syntax Highlighting */
.syntax-pink { color: var(--syntax-pink); }
.syntax-cyan { color: var(--syntax-cyan); }
.syntax-orange { color: var(--syntax-orange); }
.syntax-red { color: var(--syntax-red); }
.syntax-green { color: var(--syntax-green); }
.syntax-yellow { color: var(--syntax-yellow); }
.syntax-gray { color: var(--syntax-comment); }
.syntax-purple { color: var(--syntax-purple); }
/* Icons */
.tab-button.left { background-image: url(./icons/prev.svg); }
.tab-button.right { background-image: url(./icons/next.svg); }
.dismiss-all {
background-image: url(./icons/dismiss.svg);
opacity: 0.5;
}
@media (prefers-color-scheme: light) {
.dismiss-all { filter: invert(1); }
}

View File

@@ -2,70 +2,30 @@
// React could collide with the user's code (consider React DevTools), this
// entire modal is written from scratch using the standard DOM APIs. All CSS is
// scoped in `overlay.css`, and all elements exist under a shadow root. These
// constraints make the overlay very simple to understand and work on.
// constraints make the overlay simple to understand and work on.
//
// This file has two consumers:
// - The bundler error page which embeds a list of bundler errors to render.
// - The client runtime, for when reloading errors happen.
// Both use a WebSocket to coordinate followup updates, when new errors are
// added or previous ones are solved.
if (side !== "client") throw new Error("Not client side!");
// NOTE: imports are at the bottom for readability
import { BundlerMessageLevel } from "../enums";
import { css } from "../macros" with { type: "macro" };
import {
BundlerMessage,
BundlerMessageLocation,
BundlerNote,
decodeSerializedError,
type DeserializedFailure,
} from "./error-serialization";
import { DataViewReader } from "./reader";
if (side !== "client") throw new Error("Not client side!");
/** When set, the next successful build will reload the page. */
export let hasFatalError = false;
/**
* 32-bit integer corresponding to `SerializedFailure.Owner.Packed`
* It is never decoded client-side.
*/
type FailureOwner = number;
/**
* Build errors come from SerializedFailure objects on the server, with the key
* being the the SerializedFailure.Owner bitcast to an i32.
*/
const buildErrors = new Map<FailureOwner, DeserializedFailure>();
/** Runtime errors are stored in a list and are cleared before any hot update. */
const runtimeErrors: RuntimeError[] = [];
const errorDoms = new Map<FailureOwner, ErrorDomNodes>();
const updatedErrorOwners = new Set<FailureOwner>();
/**
* -1 => All build errors
* 0.. => Runtime error by index
*/
let activeErrorIndex = -1;
let lastActiveErrorIndex = -1;
let needUpdateNavbar = false;
let domShadowRoot: HTMLElement;
let domModalTitle: HTMLElement;
let domErrorContent: HTMLElement;
/** For build errors */
let domFooterText: HTMLElement;
/** For runtime errors */
let domNavBar: {
root: HTMLElement;
active: HTMLElement;
total: HTMLElement;
label: Text;
prevBtn: HTMLButtonElement;
nextBtn: HTMLButtonElement;
dismissAllBtn: HTMLButtonElement;
} = {} as any;
// I would have used JSX, but TypeScript types interfere in odd ways. However,
// this pattern allows concise construction of DOM nodes, but also extremely
// simple capturing of referenced nodes. Consider:
// let title;
// const btn = elem("button", { class: "file-name" }, [(title = textNode())]);
// Now you can edit `title.textContent` freely.
function elem<T extends keyof HTMLElementTagNameMap>(
tagName: T,
props?: null | Record<string, string>,
children?: Node[],
) {
// I would have used JSX, but TypeScript types interfere in odd ways.
function elem(tagName: string, props?: null | Record<string, string>, children?: (HTMLElement | Text)[]) {
const node = document.createElement(tagName);
if (props)
for (let key in props) {
@@ -78,11 +38,7 @@ function elem<T extends keyof HTMLElementTagNameMap>(
return node;
}
function elemText<T extends keyof HTMLElementTagNameMap>(
tagName: T,
props: null | Record<string, string>,
innerHTML: string,
) {
function elemText(tagName: string, props: null | Record<string, string>, innerHTML: string) {
const node = document.createElement(tagName);
if (props)
for (let key in props) {
@@ -94,38 +50,26 @@ function elemText<T extends keyof HTMLElementTagNameMap>(
const textNode = (str = "") => document.createTextNode(str);
/**
* 32-bit integer corresponding to `SerializedFailure.Owner.Packed`
* It is never decoded client-side; treat this as an opaque identifier.
*/
type ErrorId = number;
const errors = new Map<ErrorId, DeserializedFailure>();
const errorDoms = new Map<ErrorId, ErrorDomNodes>();
const updatedErrorOwners = new Set<ErrorId>();
let domShadowRoot: HTMLElement;
let domModalTitle: Text;
let domErrorList: HTMLElement;
interface ErrorDomNodes {
root: HTMLElement;
fileName: Text;
title: Text;
messages: HTMLElement[];
}
interface RuntimeError {
/** error.name */
name: string;
/** error.message */
message: string;
/** error.stack after remapping */
trace: RemappedFrame[];
/** When the `fetch` request fails or takes too long */
remapped: boolean;
/** Promise rejection */
async: boolean;
code?: CodePreview;
}
interface CodePreview {
lines: string[];
col: number;
loi: number;
len: number;
firstLine: number;
}
interface RemappedFrame extends Frame {}
declare const OVERLAY_CSS: string;
/**
* Initial mount is done lazily. The modal starts invisible, controlled
* by `setModalVisible`.
@@ -134,56 +78,29 @@ function mountModal() {
if (domModalTitle) return;
domShadowRoot = elem("bun-hmr", {
style:
"position:fixed!important;" +
"position:absolute!important;" +
"display:none!important;" +
"top:0!important;" +
"left:0!important;" +
"width:100%!important;" +
"height:100%!important;" +
"background:#8883!important" +
"z-index:2147483647!important",
"background:#8883!important",
});
const shadow = domShadowRoot.attachShadow({ mode: "open" });
const sheet = new CSSStyleSheet();
sheet.replace(OVERLAY_CSS);
sheet.replace(css("client/overlay.css", IS_BUN_DEVELOPMENT));
shadow.adoptedStyleSheets = [sheet];
const root = elem("div", { class: "root" }, [
elem("div", { class: "modal" }, [
// Runtime errors get a switcher to toggle between each runtime error and
// the build errors. This is done because runtime errors are very big.
// Only visible when a runtime error is present.
(domNavBar.root = elem("nav", null, [
// TODO: use SVG for this
(domNavBar.prevBtn = elemText(
"button",
{ class: "tab-button left", disabled: "true", "aria-label": "Previous error" },
"",
)),
(domNavBar.nextBtn = elemText("button", { class: "tab-button right", "aria-label": "Next error" }, "")),
elem("span", null, [
(domNavBar.active = elem("code")),
textNode(" of "),
(domNavBar.total = elem("code")),
(domNavBar.label = textNode(" Errors")),
]),
elem("div", { class: "flex" }),
(domNavBar.dismissAllBtn = elem("button", { class: "dismiss-all", "aria-label": "Dismiss all errors" })),
])),
// The active page's header
elem("header", null, [(domModalTitle = elem("div", { class: "title" }))]),
// The active page's content
(domErrorContent = elem("div", { class: "error-content" })),
elem("header", null, [(domModalTitle = textNode())]),
(domErrorList = elem("div", { class: "error-list" })),
elem("footer", null, [
(domFooterText = elemText("div", null, "")),
elem("div", { class: "flex" }),
elemText("div", null, "Bun v" + config.bun),
// TODO: for HMR turn this into a clickable thing + say it can be dismissed
textNode("Errors during a build can only be dismissed fixing them."),
]),
]),
]);
domNavBar.dismissAllBtn.addEventListener("click", onDismissAllErrors);
domNavBar.prevBtn.addEventListener("click", onPrevError);
domNavBar.nextBtn.addEventListener("click", onNextError);
shadow.appendChild(root);
document.body.appendChild(domShadowRoot);
}
@@ -196,176 +113,42 @@ function setModalVisible(visible: boolean) {
}
/** Handler for `MessageId.errors` websocket packet */
export function onServerErrorPayload(view: DataView<ArrayBuffer>) {
export function onErrorMessage(view: DataView<ArrayBuffer>) {
const reader = new DataViewReader(view, 1);
const removedCount = reader.u32();
for (let i = 0; i < removedCount; i++) {
const removed = reader.u32();
updatedErrorOwners.add(removed);
buildErrors.delete(removed);
errors.delete(removed);
}
while (reader.hasMoreData()) {
decodeAndAppendServerError(reader);
decodeAndAppendError(reader);
}
updateErrorOverlay();
}
export async function onRuntimeError(err: any, fatal = false, async = false) {
try {
if (fatal) {
hasFatalError = true;
}
// Parse the stack trace and normalize the error message.
let name = err?.name ?? "error";
if (name === "Error") name = "error";
let message = err?.message;
if (!message)
try {
message = JSON.stringify(err);
} catch (e) {
message = "[error while serializing error: " + e + "]";
}
else if (typeof message !== "string") {
try {
message = JSON.stringify(message);
} catch (e) {
message = "[error while serializing error message: " + e + "]";
}
}
const parsed = parseStackTrace(err) ?? [];
const browserUrl = location.href;
// Serialize the request into a binary buffer. Pre-allocate a little above what it needs.
let bufferLength = 3 * 4 + (name.length + message.length + browserUrl.length) * 3;
for (const frame of parsed) {
bufferLength += 4 * 4 + ((frame.fn?.length ?? 0) + (frame.file?.length ?? 0)) * 3;
}
const writer = DataViewWriter.initCapacity(bufferLength);
writer.stringWithLength(name);
writer.stringWithLength(message);
writer.stringWithLength(browserUrl);
writer.u32(parsed.length);
for (const frame of parsed) {
writer.u32(frame.line ?? 0);
writer.u32(frame.col ?? 0);
writer.stringWithLength(frame.fn ?? "");
const fileName = frame.file;
if (fileName) {
writer.stringWithLength(fileName);
} else {
writer.u32(0);
}
}
// Request the error to be reported and remapped.
const response = await fetch("/_bun/report_error", {
method: "POST",
body: writer.view.buffer,
});
try {
if (!response.ok) {
throw new Error("Failed to report error");
}
const reader = new DataViewReader(new DataView(await response.arrayBuffer()), 0);
const trace: Frame[] = [];
const traceLen = reader.u32();
for (let i = 0; i < traceLen; i++) {
const line = reader.i32();
const col = reader.i32();
const fn = reader.string32();
const file = reader.string32();
trace.push({
fn,
file,
line,
col,
});
}
let code: CodePreview | undefined;
const codePreviewLineCount = reader.u8();
if (codePreviewLineCount > 0) {
const lineOfInterestOffset = reader.u32();
const firstLineNumber = reader.u32();
const highlightedColumn = reader.u32();
let lines = new Array(codePreviewLineCount);
for (let i = 0; i < codePreviewLineCount; i++) {
const line = reader.string32();
lines[i] = line;
}
const { col, len } = expandHighlight(lines[lineOfInterestOffset], highlightedColumn);
lines = lines.map(line => syntaxHighlight(line));
code = {
lines,
col,
loi: lineOfInterestOffset,
len,
firstLine: firstLineNumber,
};
}
runtimeErrors.push({
name,
message,
trace,
remapped: true,
async,
code,
});
} catch (e) {
console.error("Failed to remap error", e);
runtimeErrors.push({
name,
message,
trace: parsed,
remapped: false,
async,
});
}
needUpdateNavbar = true;
updateErrorOverlay();
} catch (e) {
console.error("Failed to report error", e);
}
export const enum RuntimeErrorType {
recoverable,
/** Requires that clearances perform a full page reload */
fatal,
}
function expandHighlight(line: string, col: number) {
let rest = line.slice(Math.max(0, col - 1));
let len = 1;
len = 0;
let prev = line.slice(0, col - 1);
// expand forward from new
if (rest.match(/^new\s/)) {
len += 4;
rest = rest.slice(4);
export function onRuntimeError(err: any, type: RuntimeErrorType) {
if (type === RuntimeErrorType.fatal) {
hasFatalError = true;
}
// expand backward from new
const newText = prev.match(/new\s+$/)?.[0];
if (newText) {
len += newText.length;
col -= newText.length;
prev = prev.slice(0, prev.length - newText.length);
}
// expand backward from throw
const throwText = prev.match(/throw\s+$/)?.[0];
if (throwText) {
len += throwText.length;
col -= throwText.length;
}
len += (rest.match(/.\b/)?.index ?? -1) + 1;
if (len <= 0) len = 1;
return { col, len };
console.error(err);
}
/**
* Call this for each error, then call `updateErrorOverlay` to commit the
* changes to the UI in one smooth motion.
*/
export function decodeAndAppendServerError(r: DataViewReader) {
export function decodeAndAppendError(r: DataViewReader) {
const owner = r.u32();
const file = r.string32() || null;
const messageCount = r.u32();
@@ -373,20 +156,12 @@ export function decodeAndAppendServerError(r: DataViewReader) {
for (let i = 0; i < messageCount; i++) {
messages[i] = decodeSerializedError(r);
}
buildErrors.set(owner, { file, messages });
errors.set(owner, { file, messages });
updatedErrorOwners.add(owner);
activeErrorIndex = -1;
needUpdateNavbar = true;
}
/**
* Called when the list of errors changes, bundling errors change, or the active error page changes.
*/
export function updateErrorOverlay() {
// if there are no errors, hide the modal
const totalErrors = runtimeErrors.length + buildErrors.size;
if (totalErrors === 0) {
if (errors.size === 0) {
if (IS_ERROR_RUNTIME) {
location.reload();
} else {
@@ -394,138 +169,13 @@ export function updateErrorOverlay() {
}
return;
}
// ensure the target page is valid
if (activeErrorIndex === -1 && buildErrors.size === 0) {
activeErrorIndex = 0; // there is a runtime error, else this modal will be hidden
needUpdateNavbar = true;
} else if (activeErrorIndex >= runtimeErrors.length) {
needUpdateNavbar = true;
if (activeErrorIndex === 0) {
activeErrorIndex = -1; // there must be a build error, else this modal will be hidden
} else {
activeErrorIndex = runtimeErrors.length - 1;
}
}
mountModal();
if (needUpdateNavbar) {
needUpdateNavbar = false;
if (activeErrorIndex >= 0) {
// Runtime errors
const err = runtimeErrors[activeErrorIndex];
domModalTitle.innerHTML = err.async ? "Unhandled Promise Rejection" : "Runtime Error";
updateRuntimeErrorOverlay(err);
} else {
// Build errors
domModalTitle.innerHTML = `<span class="count">${buildErrors.size}</span> Build Error${buildErrors.size === 1 ? "" : "s"}`;
}
domNavBar.active.textContent = (activeErrorIndex + 1 + (buildErrors.size > 0 ? 1 : 0)).toString();
domNavBar.total.textContent = totalErrors.toString();
domNavBar.label.textContent = totalErrors === 1 ? " Error" : " Errors";
domNavBar.nextBtn.disabled = activeErrorIndex >= runtimeErrors.length - 1;
domNavBar.prevBtn.disabled = buildErrors.size > 0 ? activeErrorIndex < 0 : activeErrorIndex == 0;
}
if (activeErrorIndex === -1) {
if (lastActiveErrorIndex !== -1) {
// clear the error content from the runtime error
domErrorContent.innerHTML = "";
updateBuildErrorOverlay({ remountAll: true });
} else {
updateBuildErrorOverlay({});
}
}
lastActiveErrorIndex = activeErrorIndex;
// The footer is only visible if there are build errors.
if (buildErrors.size > 0) {
domFooterText.style.display = "block";
domFooterText.innerText =
activeErrorIndex === -1
? "Errors during a build can only be dismissed by fixing them."
: "This dialog cannot be dismissed as there are additional build errors.";
} else {
domFooterText.style.display = "none";
}
domNavBar.dismissAllBtn.style.display = buildErrors.size > 0 ? "none" : "block";
// The navbar is only visible if there are runtime errors. It contains the dismiss button.
domNavBar.root.style.display = runtimeErrors.length > 0 ? "flex" : "none";
setModalVisible(true);
}
/**
* Called when switching between runtime errors.
*/
function updateRuntimeErrorOverlay(err: RuntimeError) {
domErrorContent.innerHTML = ""; // clear contents
const dom = elem("div", { class: "r-error" });
let name = err.name;
if (!name || name === "Error") name = "error";
dom.appendChild(
elem("div", { class: "message-desc error" }, [
elemText("code", { class: "name" }, name),
elemText("code", { class: "muted" }, ": "),
elemText("code", {}, err.message),
]),
);
const { code } = err;
let trace = err.trace;
if (code) {
const {
lines,
col: columnToHighlight,
loi: lineOfInterestOffset,
len: highlightLength,
firstLine: firstLineNumber,
} = code;
const codeFrame = trace[0];
trace = trace.slice(1);
const domCode = elem("div", { class: "r-code-wrap" });
const aboveRoi = lines.slice(0, lineOfInterestOffset + 1);
const belowRoi = lines.slice(lineOfInterestOffset + 1);
const gutter = elem("div", { class: "gutter" }, [
elemText("div", null, aboveRoi.map((_, i) => `${i + firstLineNumber}`).join("\n")),
elem("div", { class: "highlight-gap" }),
elemText("div", null, belowRoi.map((_, i) => `${i + firstLineNumber + aboveRoi.length}`).join("\n")),
]);
domCode.appendChild(
elem("div", { class: "code" }, [
gutter,
elem("div", { class: "view" }, [
...aboveRoi.map(line => mapCodePreviewLine(line)),
elem("div", { class: "highlight-wrap log-error" }, [
elemText("span", { class: "space" }, "_".repeat(columnToHighlight - 1)),
elemText("span", { class: "line" }, "_".repeat(highlightLength)),
]),
...belowRoi.map(line => mapCodePreviewLine(line)),
]),
]),
);
domCode.appendChild(renderTraceFrame(codeFrame, "trace-frame"));
dom.appendChild(domCode);
}
dom.appendChild(
elem("div", { class: "r-error-trace" }, [...trace.map(frame => renderTraceFrame(frame, "trace-frame"))]),
);
domErrorContent.appendChild(dom);
}
function updateBuildErrorOverlay({ remountAll = false }) {
let totalCount = 0;
const owners = remountAll ? buildErrors.keys() : updatedErrorOwners;
for (const owner of owners) {
const data = buildErrors.get(owner);
for (const owner of updatedErrorOwners) {
const data = errors.get(owner);
let dom = errorDoms.get(owner);
// If this failure was removed, delete it.
@@ -538,13 +188,28 @@ function updateBuildErrorOverlay({ remountAll = false }) {
totalCount += data.messages.length;
// Create the element for the root if it does not yet exist.
if (!dom || remountAll) {
let fileName;
const root = elem("div", { class: "b-group" }, [
elem("div", { class: "trace-frame" }, [elem("div", { class: "file-name" }, [(fileName = textNode())])]),
if (!dom) {
let title;
let btn;
const root = elem("div", { class: "message-group" }, [
// (btn = elem("button", { class: "file-name" }, [(title = textNode())])),
elem("div", { class: "file-name" }, [(title = textNode())]),
]);
dom = { root, fileName, messages: [] };
domErrorContent.appendChild(root);
// btn.addEventListener("click", () => {
// const firstLocation = errors.get(owner)?.messages[0]?.location;
// if (!firstLocation) return;
// let fileName = title.textContent.replace(/^\//, "");
// fetch("/_bun/src/" + fileName, {
// headers: {
// "Open-In-Editor": "1",
// "Editor-Line": firstLocation.line.toString(),
// "Editor-Column": firstLocation.column.toString(),
// },
// });
// });
dom = { root, title, messages: [] };
// TODO: sorted insert?
domErrorList.appendChild(root);
errorDoms.set(owner, dom);
} else {
// For simplicity, messages are not reused, even if left unchanged.
@@ -552,7 +217,7 @@ function updateBuildErrorOverlay({ remountAll = false }) {
}
// Update the DOM with the new data.
dom.fileName.textContent = data.file;
dom.title.textContent = data.file;
for (const msg of data.messages) {
const domMessage = renderBundlerMessage(msg);
@@ -560,13 +225,12 @@ function updateBuildErrorOverlay({ remountAll = false }) {
dom.messages.push(domMessage);
}
}
updatedErrorOwners.clear();
}
function mapCodePreviewLine(line: string) {
const pre = elem("pre");
pre.innerHTML = line;
return pre;
domModalTitle.textContent = `${errors.size} Build Error${errors.size !== 1 ? "s" : ""}`;
updatedErrorOwners.clear();
setModalVisible(true);
}
const bundleLogLevelToName = ["error", "warn", "note", "debug", "verbose"];
@@ -574,7 +238,7 @@ const bundleLogLevelToName = ["error", "warn", "note", "debug", "verbose"];
function renderBundlerMessage(msg: BundlerMessage) {
return elem(
"div",
{ class: "b-msg" },
{ class: "message" },
[
renderErrorMessageLine(msg.level, msg.message),
...(msg.location ? renderCodeLine(msg.location, msg.level) : []),
@@ -583,31 +247,13 @@ function renderBundlerMessage(msg: BundlerMessage) {
);
}
function renderTraceFrame(frame: Frame, className: string) {
const hasFn = !!frame.fn;
return elem("div", { class: className }, [
elemText("span", { class: "muted" }, "at "),
...(hasFn
? [
//
elemText("span", { class: "function-name" }, frame.fn),
elemText("span", { class: "muted" }, " in "),
]
: []),
elemText("span", { class: "file-name" }, frame.file!),
...(frame.line
? [elemText("code", { class: "muted" }, `:${frame.line}` + (frame.col ? `:${frame.col}` : ""))]
: []),
]);
}
function renderErrorMessageLine(level: BundlerMessageLevel, text: string) {
const levelName = bundleLogLevelToName[level];
if (IS_BUN_DEVELOPMENT && !levelName) {
throw new Error("Unknown log level: " + level);
}
return elem("div", { class: "message-desc " + levelName }, [
elemText("span", { class: "log-label log-" + levelName }, levelName),
return elem("div", { class: "message-text" }, [
elemText("span", { class: "log-" + levelName }, levelName),
elemText("span", { class: "log-colon" }, ": "),
elemText("span", { class: "log-text" }, text),
]);
@@ -615,15 +261,13 @@ function renderErrorMessageLine(level: BundlerMessageLevel, text: string) {
function renderCodeLine(location: BundlerMessageLocation, level: BundlerMessageLevel) {
return [
elem("div", { class: "code" }, [
elem("div", { class: "gutter" }, [elemText("div", null, `${location.line}`)]),
elem("div", { class: "view" }, [
mapCodePreviewLine(syntaxHighlight(location.lineText)),
elem("div", { class: "highlight-wrap log-" + bundleLogLevelToName[level] }, [
elemText("span", { class: "space" }, "_".repeat(location.column - 1)),
elemText("span", { class: "line" }, "_".repeat(location.length)),
]),
]),
elem("div", { class: "code-line" }, [
elemText("code", { class: "line-num" }, `${location.line}`),
elemText("pre", { class: "code-view" }, location.lineText),
]),
elem("div", { class: "highlight-wrap log-" + bundleLogLevelToName[level] }, [
elemText("span", { class: "space" }, "_".repeat(`${location.line}`.length + location.column - 1)),
elemText("span", { class: "line" }, "_".repeat(location.length)),
]),
];
}
@@ -634,47 +278,3 @@ function renderNote(note: BundlerNote) {
...(note.location ? renderCodeLine(note.location, BundlerMessageLevel.note) : []),
];
}
function onDismissAllErrors() {
if (buildErrors.size === 0) {
setModalVisible(false);
} else {
// Cannot dismiss build errors?
activeErrorIndex = -1;
updateErrorOverlay();
}
}
function onPrevError() {
if (activeErrorIndex === -1) return;
if (activeErrorIndex === 0 && buildErrors.size === 0) return;
activeErrorIndex--;
needUpdateNavbar = true;
updateErrorOverlay();
}
function onNextError() {
if (activeErrorIndex >= runtimeErrors.length - 1) return;
activeErrorIndex++;
needUpdateNavbar = true;
updateErrorOverlay();
}
declare global {
interface HTMLElementTagNameMap {
"bun-hmr": HTMLElement;
}
}
import { BundlerMessageLevel } from "../enums";
import { css } from "../macros" with { type: "macro" };
import {
BundlerMessage,
BundlerMessageLocation,
BundlerNote,
decodeSerializedError,
type DeserializedFailure,
} from "./error-serialization";
import { DataViewReader, DataViewWriter } from "./data-view";
import { parseStackTrace, type Frame } from "./stack-trace";
import { syntaxHighlight } from "./JavaScriptSyntaxHighlighter";

Some files were not shown because too many files have changed in this diff Show More