mirror of
https://github.com/oven-sh/bun
synced 2026-02-03 07:28:53 +00:00
Compare commits
1 Commits
claude/css
...
claude/rep
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4b10b907fc |
19
.aikido
19
.aikido
@@ -1,19 +0,0 @@
|
||||
exclude:
|
||||
paths:
|
||||
- test
|
||||
- scripts
|
||||
- bench
|
||||
- packages/bun-lambda
|
||||
- packages/bun-release
|
||||
- packages/bun-wasm
|
||||
- packages/bun-vscode
|
||||
- packages/bun-plugin-yaml
|
||||
- packages/bun-plugin-svelte
|
||||
- packages/bun-native-plugin-rs
|
||||
- packages/bun-native-bundler-plugin-api
|
||||
- packages/bun-inspector-protocol
|
||||
- packages/bun-inspector-frontend
|
||||
- packages/bun-error
|
||||
- packages/bun-debug-adapter-protocol
|
||||
- packages/bun-build-mdx-rs
|
||||
- packages/@types/bun
|
||||
@@ -133,20 +133,6 @@ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64";
|
||||
|
||||
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
|
||||
|
||||
# The following is necessary to configure buildkite to use a stable
|
||||
# checkout directory. sccache hashes absolute paths into its cache keys,
|
||||
# so if buildkite uses a different checkout path each time (which it does
|
||||
# by default), sccache will be useless.
|
||||
RUN mkdir -p -m 755 /var/lib/buildkite-agent/hooks && \
|
||||
cat <<'EOF' > /var/lib/buildkite-agent/hooks/environment
|
||||
#!/bin/sh
|
||||
set -efu
|
||||
|
||||
export BUILDKITE_BUILD_CHECKOUT_PATH=/var/lib/buildkite-agent/build
|
||||
EOF
|
||||
|
||||
RUN chmod 744 /var/lib/buildkite-agent/hooks/environment
|
||||
|
||||
COPY ../*/agent.mjs /var/bun/scripts/
|
||||
|
||||
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
|
||||
|
||||
@@ -16,7 +16,6 @@ import {
|
||||
getEmoji,
|
||||
getEnv,
|
||||
getLastSuccessfulBuild,
|
||||
getSecret,
|
||||
isBuildkite,
|
||||
isBuildManual,
|
||||
isFork,
|
||||
@@ -31,7 +30,7 @@ import {
|
||||
} from "../scripts/utils.mjs";
|
||||
|
||||
/**
|
||||
* @typedef {"linux" | "darwin" | "windows" | "freebsd"} Os
|
||||
* @typedef {"linux" | "darwin" | "windows"} Os
|
||||
* @typedef {"aarch64" | "x64"} Arch
|
||||
* @typedef {"musl"} Abi
|
||||
* @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro
|
||||
@@ -114,7 +113,6 @@ const buildPlatforms = [
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
|
||||
{ os: "windows", arch: "x64", release: "2019" },
|
||||
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
|
||||
{ os: "freebsd", arch: "x64", release: "14.3" },
|
||||
];
|
||||
|
||||
/**
|
||||
@@ -557,6 +555,7 @@ function getBuildBunStep(platform, options) {
|
||||
/**
|
||||
* @typedef {Object} TestOptions
|
||||
* @property {string} [buildId]
|
||||
* @property {boolean} [unifiedTests]
|
||||
* @property {string[]} [testFiles]
|
||||
* @property {boolean} [dryRun]
|
||||
*/
|
||||
@@ -569,7 +568,7 @@ function getBuildBunStep(platform, options) {
|
||||
*/
|
||||
function getTestBunStep(platform, options, testOptions = {}) {
|
||||
const { os, profile } = platform;
|
||||
const { buildId, testFiles } = testOptions;
|
||||
const { buildId, unifiedTests, testFiles } = testOptions;
|
||||
|
||||
const args = [`--step=${getTargetKey(platform)}-build-bun`];
|
||||
if (buildId) {
|
||||
@@ -591,7 +590,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
|
||||
agents: getTestAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
parallelism: os === "darwin" ? 2 : 10,
|
||||
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
|
||||
timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
|
||||
env: {
|
||||
ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
|
||||
@@ -660,7 +659,7 @@ function getReleaseStep(buildPlatforms, options) {
|
||||
agents: {
|
||||
queue: "test-darwin",
|
||||
},
|
||||
depends_on: buildPlatforms.filter(p => p.os !== "freebsd").map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
env: {
|
||||
CANARY: revision,
|
||||
},
|
||||
@@ -773,6 +772,8 @@ function getBenchmarkStep() {
|
||||
* @property {Platform[]} [buildPlatforms]
|
||||
* @property {Platform[]} [testPlatforms]
|
||||
* @property {string[]} [testFiles]
|
||||
* @property {boolean} [unifiedBuilds]
|
||||
* @property {boolean} [unifiedTests]
|
||||
*/
|
||||
|
||||
/**
|
||||
@@ -943,6 +944,22 @@ function getOptionsStep() {
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
{
|
||||
key: "unified-builds",
|
||||
select: "Do you want to build each platform in a single step?",
|
||||
hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
|
||||
required: false,
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
{
|
||||
key: "unified-tests",
|
||||
select: "Do you want to run tests in a single step?",
|
||||
hint: "If true, tests will not be split into separate steps (this will be very slow)",
|
||||
required: false,
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
@@ -1008,6 +1025,8 @@ async function getPipelineOptions() {
|
||||
buildImages: parseBoolean(options["build-images"]),
|
||||
publishImages: parseBoolean(options["publish-images"]),
|
||||
testFiles: parseArray(options["test-files"]),
|
||||
unifiedBuilds: parseBoolean(options["unified-builds"]),
|
||||
unifiedTests: parseBoolean(options["unified-tests"]),
|
||||
buildPlatforms: buildPlatformKeys?.length
|
||||
? buildPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...buildPlatformsMap.get(key), profile })))
|
||||
: Array.from(buildPlatformsMap.values()),
|
||||
@@ -1073,7 +1092,7 @@ async function getPipeline(options = {}) {
|
||||
const imagePlatforms = new Map(
|
||||
buildImages || publishImages
|
||||
? [...buildPlatforms, ...testPlatforms]
|
||||
.filter(({ os }) => os !== "darwin")
|
||||
.filter(({ os }) => os === "linux" || os === "windows")
|
||||
.map(platform => [getImageKey(platform), platform])
|
||||
: [],
|
||||
);
|
||||
@@ -1089,7 +1108,7 @@ async function getPipeline(options = {}) {
|
||||
});
|
||||
}
|
||||
|
||||
let { skipBuilds, forceBuilds, dryRun } = options;
|
||||
let { skipBuilds, forceBuilds, unifiedBuilds, dryRun } = options;
|
||||
dryRun = dryRun || !!buildImages;
|
||||
|
||||
/** @type {string | undefined} */
|
||||
@@ -1107,13 +1126,10 @@ async function getPipeline(options = {}) {
|
||||
const includeASAN = !isMainBranch();
|
||||
|
||||
if (!buildId) {
|
||||
let relevantBuildPlatforms = includeASAN
|
||||
const relevantBuildPlatforms = includeASAN
|
||||
? buildPlatforms
|
||||
: buildPlatforms.filter(({ profile }) => profile !== "asan");
|
||||
|
||||
// run build-image but no build-bun yet
|
||||
relevantBuildPlatforms = relevantBuildPlatforms.filter(({ os }) => os !== "freebsd");
|
||||
|
||||
steps.push(
|
||||
...relevantBuildPlatforms.map(target => {
|
||||
const imageKey = getImageKey(target);
|
||||
@@ -1123,16 +1139,13 @@ async function getPipeline(options = {}) {
|
||||
dependsOn.push(`${imageKey}-build-image`);
|
||||
}
|
||||
|
||||
const steps = [];
|
||||
steps.push(getBuildCppStep(target, options));
|
||||
steps.push(getBuildZigStep(target, options));
|
||||
steps.push(getLinkBunStep(target, options));
|
||||
|
||||
return getStepWithDependsOn(
|
||||
{
|
||||
key: getTargetKey(target),
|
||||
group: getTargetLabel(target),
|
||||
steps,
|
||||
steps: unifiedBuilds
|
||||
? [getBuildBunStep(target, options)]
|
||||
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
|
||||
},
|
||||
...dependsOn,
|
||||
);
|
||||
@@ -1141,13 +1154,13 @@ async function getPipeline(options = {}) {
|
||||
}
|
||||
|
||||
if (!isMainBranch()) {
|
||||
const { skipTests, forceTests, testFiles } = options;
|
||||
const { skipTests, forceTests, unifiedTests, testFiles } = options;
|
||||
if (!skipTests || forceTests) {
|
||||
steps.push(
|
||||
...testPlatforms.map(target => ({
|
||||
key: getTargetKey(target),
|
||||
group: getTargetLabel(target),
|
||||
steps: [getTestBunStep(target, options, { testFiles, buildId })],
|
||||
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
|
||||
})),
|
||||
);
|
||||
}
|
||||
@@ -1190,43 +1203,6 @@ async function main() {
|
||||
console.log("Generated options:", options);
|
||||
}
|
||||
|
||||
startGroup("Querying GitHub for files...");
|
||||
if (options && isBuildkite && !isMainBranch()) {
|
||||
/** @type {string[]} */
|
||||
let allFiles = [];
|
||||
/** @type {string[]} */
|
||||
let newFiles = [];
|
||||
let prFileCount = 0;
|
||||
try {
|
||||
console.log("on buildkite: collecting new files from PR");
|
||||
const per_page = 50;
|
||||
const { BUILDKITE_PULL_REQUEST } = process.env;
|
||||
for (let i = 1; i <= 10; i++) {
|
||||
const res = await fetch(
|
||||
`https://api.github.com/repos/oven-sh/bun/pulls/${BUILDKITE_PULL_REQUEST}/files?per_page=${per_page}&page=${i}`,
|
||||
{ headers: { Authorization: `Bearer ${getSecret("GITHUB_TOKEN")}` } },
|
||||
);
|
||||
const doc = await res.json();
|
||||
console.log(`-> page ${i}, found ${doc.length} items`);
|
||||
if (doc.length === 0) break;
|
||||
for (const { filename, status } of doc) {
|
||||
prFileCount += 1;
|
||||
allFiles.push(filename);
|
||||
if (status !== "added") continue;
|
||||
newFiles.push(filename);
|
||||
}
|
||||
if (doc.length < per_page) break;
|
||||
}
|
||||
console.log(`- PR ${BUILDKITE_PULL_REQUEST}, ${prFileCount} files, ${newFiles.length} new files`);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
if (allFiles.every(filename => filename.startsWith("docs/"))) {
|
||||
console.log(`- PR is only docs, skipping tests!`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
startGroup("Generating pipeline...");
|
||||
const pipeline = await getPipeline(options);
|
||||
if (!pipeline) {
|
||||
|
||||
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -16,7 +16,6 @@
|
||||
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
|
||||
|
||||
2
.github/workflows/format.yml
vendored
2
.github/workflows/format.yml
vendored
@@ -9,7 +9,7 @@ on:
|
||||
pull_request:
|
||||
merge_group:
|
||||
env:
|
||||
BUN_VERSION: "1.3.2"
|
||||
BUN_VERSION: "1.2.20"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
|
||||
@@ -9,6 +9,3 @@ test/snippets
|
||||
test/js/node/test
|
||||
test/napi/node-napi-tests
|
||||
bun.lock
|
||||
|
||||
# the output codeblocks need to stay minified
|
||||
docs/bundler/minifier.mdx
|
||||
|
||||
26
CLAUDE.md
26
CLAUDE.md
@@ -38,36 +38,16 @@ If no valid issue number is provided, find the best existing file to modify inst
|
||||
|
||||
### Writing Tests
|
||||
|
||||
Tests use Bun's Jest-compatible test runner with proper test fixtures.
|
||||
|
||||
- For **single-file tests**, prefer `-e` over `tempDir`.
|
||||
- For **multi-file tests**, prefer `tempDir` and `Bun.spawn`.
|
||||
Tests use Bun's Jest-compatible test runner with proper test fixtures:
|
||||
|
||||
```typescript
|
||||
import { test, expect } from "bun:test";
|
||||
import { bunEnv, bunExe, normalizeBunSnapshot, tempDir } from "harness";
|
||||
|
||||
test("(single-file test) my feature", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", "console.log('Hello, world!')"],
|
||||
env: bunEnv,
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
proc.stdout.text(),
|
||||
proc.stderr.text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
expect(normalizeBunSnapshot(stdout)).toMatchInlineSnapshot(`"Hello, world!"`);
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("(multi-file test) my feature", async () => {
|
||||
test("my feature", async () => {
|
||||
// Create temp directory with test files
|
||||
using dir = tempDir("test-prefix", {
|
||||
"index.js": `import { foo } from "./foo.ts"; foo();`,
|
||||
"foo.ts": `export function foo() { console.log("foo"); }`,
|
||||
"index.js": `console.log("hello");`,
|
||||
});
|
||||
|
||||
// Spawn Bun process
|
||||
|
||||
@@ -25,6 +25,16 @@ if(CMAKE_HOST_APPLE)
|
||||
endif()
|
||||
include(SetupLLVM)
|
||||
|
||||
find_program(SCCACHE_PROGRAM sccache)
|
||||
if(SCCACHE_PROGRAM AND NOT DEFINED ENV{NO_SCCACHE})
|
||||
include(SetupSccache)
|
||||
else()
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
include(SetupCcache)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# --- Project ---
|
||||
|
||||
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
|
||||
@@ -47,16 +57,6 @@ include(SetupEsbuild)
|
||||
include(SetupZig)
|
||||
include(SetupRust)
|
||||
|
||||
find_program(SCCACHE_PROGRAM sccache)
|
||||
if(SCCACHE_PROGRAM AND NOT DEFINED ENV{NO_SCCACHE})
|
||||
include(SetupSccache)
|
||||
else()
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
include(SetupCcache)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Generate dependency versions header
|
||||
include(GenerateDependencyVersions)
|
||||
|
||||
|
||||
@@ -201,7 +201,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi
|
||||
- Batch up your changes
|
||||
- Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work)
|
||||
- Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
|
||||
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
|
||||
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug lgos into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
|
||||
- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).
|
||||
|
||||
## Code generation scripts
|
||||
|
||||
10
README.md
10
README.md
@@ -54,7 +54,7 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
|
||||
curl -fsSL https://bun.com/install | bash
|
||||
|
||||
# on windows
|
||||
powershell -c "irm bun.sh/install.ps1 | iex"
|
||||
powershell -c "irm bun.com/install.ps1 | iex"
|
||||
|
||||
# with npm
|
||||
npm install -g bun
|
||||
@@ -104,13 +104,13 @@ bun upgrade --canary
|
||||
- [File types (Loaders)](https://bun.com/docs/runtime/loaders)
|
||||
- [TypeScript](https://bun.com/docs/runtime/typescript)
|
||||
- [JSX](https://bun.com/docs/runtime/jsx)
|
||||
- [Environment variables](https://bun.com/docs/runtime/environment-variables)
|
||||
- [Environment variables](https://bun.com/docs/runtime/env)
|
||||
- [Bun APIs](https://bun.com/docs/runtime/bun-apis)
|
||||
- [Web APIs](https://bun.com/docs/runtime/web-apis)
|
||||
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-compat)
|
||||
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-apis)
|
||||
- [Single-file executable](https://bun.com/docs/bundler/executables)
|
||||
- [Plugins](https://bun.com/docs/runtime/plugins)
|
||||
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/watch-mode)
|
||||
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/hot)
|
||||
- [Module resolution](https://bun.com/docs/runtime/modules)
|
||||
- [Auto-install](https://bun.com/docs/runtime/autoimport)
|
||||
- [bunfig.toml](https://bun.com/docs/runtime/bunfig)
|
||||
@@ -230,7 +230,7 @@ bun upgrade --canary
|
||||
|
||||
- Ecosystem
|
||||
- [Use React and JSX](https://bun.com/guides/ecosystem/react)
|
||||
- [Use Gel with Bun](https://bun.com/guides/ecosystem/gel)
|
||||
- [Use EdgeDB with Bun](https://bun.com/guides/ecosystem/edgedb)
|
||||
- [Use Prisma with Bun](https://bun.com/guides/ecosystem/prisma)
|
||||
- [Add Sentry to a Bun app](https://bun.com/guides/ecosystem/sentry)
|
||||
- [Create a Discord bot](https://bun.com/guides/ecosystem/discordjs)
|
||||
|
||||
@@ -13,4 +13,7 @@ export function run(opts = {}) {
|
||||
}
|
||||
|
||||
export const bench = Mitata.bench;
|
||||
export const group = Mitata.group;
|
||||
|
||||
export function group(_name, fn) {
|
||||
return Mitata.group(fn);
|
||||
}
|
||||
|
||||
@@ -1,156 +0,0 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const runAll = !process.argv.includes("--simple");
|
||||
|
||||
const small = new Uint8Array(1024);
|
||||
const medium = new Uint8Array(1024 * 100);
|
||||
const large = new Uint8Array(1024 * 1024);
|
||||
|
||||
for (let i = 0; i < large.length; i++) {
|
||||
const value = Math.floor(Math.sin(i / 100) * 128 + 128);
|
||||
if (i < small.length) small[i] = value;
|
||||
if (i < medium.length) medium[i] = value;
|
||||
large[i] = value;
|
||||
}
|
||||
|
||||
const format = new Intl.NumberFormat("en-US", { notation: "compact", unit: "byte" });
|
||||
|
||||
async function compress(data, format) {
|
||||
const cs = new CompressionStream(format);
|
||||
const writer = cs.writable.getWriter();
|
||||
const reader = cs.readable.getReader();
|
||||
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const chunks = [];
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
}
|
||||
|
||||
const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function decompress(data, format) {
|
||||
const ds = new DecompressionStream(format);
|
||||
const writer = ds.writable.getWriter();
|
||||
const reader = ds.readable.getReader();
|
||||
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const chunks = [];
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
}
|
||||
|
||||
const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function roundTrip(data, format) {
|
||||
const compressed = await compress(data, format);
|
||||
return await decompress(compressed, format);
|
||||
}
|
||||
|
||||
const formats = ["deflate", "gzip", "deflate-raw"];
|
||||
if (runAll) formats.push("brotli", "zstd");
|
||||
|
||||
// Small data benchmarks (1KB)
|
||||
group(`CompressionStream ${format.format(small.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await compress(small, fmt));
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Medium data benchmarks (100KB)
|
||||
group(`CompressionStream ${format.format(medium.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await compress(medium, fmt));
|
||||
} catch (e) {}
|
||||
}
|
||||
});
|
||||
|
||||
// Large data benchmarks (1MB)
|
||||
group(`CompressionStream ${format.format(large.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await compress(large, fmt));
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const compressedData = {};
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
compressedData[fmt] = {
|
||||
small: await compress(small, fmt),
|
||||
medium: await compress(medium, fmt),
|
||||
large: await compress(large, fmt),
|
||||
};
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
|
||||
group(`DecompressionStream ${format.format(small.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
if (compressedData[fmt]) {
|
||||
bench(fmt, async () => await decompress(compressedData[fmt].small, fmt));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
group(`DecompressionStream ${format.format(medium.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
if (compressedData[fmt]) {
|
||||
bench(fmt, async () => await decompress(compressedData[fmt].medium, fmt));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
group(`DecompressionStream ${format.format(large.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
if (compressedData[fmt]) {
|
||||
bench(fmt, async () => await decompress(compressedData[fmt].large, fmt));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
group(`roundtrip ${format.format(large.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await roundTrip(large, fmt));
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
57
build.zig
57
build.zig
@@ -18,6 +18,22 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
|
||||
|
||||
const pathRel = fs.path.relative;
|
||||
|
||||
/// When updating this, make sure to adjust SetupZig.cmake
|
||||
const recommended_zig_version = "0.14.0";
|
||||
|
||||
// comptime {
|
||||
// if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
|
||||
// @compileError(
|
||||
// "" ++
|
||||
// "Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
|
||||
// builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
|
||||
// "CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
|
||||
// "upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
|
||||
// "comment this error out.",
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
|
||||
const zero_sha = "0000000000000000000000000000000000000000";
|
||||
|
||||
const BunBuildOptions = struct {
|
||||
@@ -32,7 +48,6 @@ const BunBuildOptions = struct {
|
||||
/// enable debug logs in release builds
|
||||
enable_logs: bool = false,
|
||||
enable_asan: bool,
|
||||
enable_fuzzilli: bool,
|
||||
enable_valgrind: bool,
|
||||
use_mimalloc: bool,
|
||||
tracy_callstack_depth: u16,
|
||||
@@ -82,10 +97,9 @@ const BunBuildOptions = struct {
|
||||
opts.addOption(bool, "baseline", this.isBaseline());
|
||||
opts.addOption(bool, "enable_logs", this.enable_logs);
|
||||
opts.addOption(bool, "enable_asan", this.enable_asan);
|
||||
opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli);
|
||||
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
|
||||
opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version}));
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
|
||||
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
|
||||
opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);
|
||||
|
||||
@@ -120,8 +134,8 @@ pub fn getOSVersionMin(os: OperatingSystem) ?Target.Query.OsVersion {
|
||||
|
||||
pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
|
||||
return switch (os) {
|
||||
// Compiling with a newer glibc than this will break certain cloud environments. See symbols.test.ts.
|
||||
.linux => .{ .major = 2, .minor = 26, .patch = 0 },
|
||||
// Compiling with a newer glibc than this will break certain cloud environments.
|
||||
.linux => .{ .major = 2, .minor = 27, .patch = 0 },
|
||||
|
||||
else => null,
|
||||
};
|
||||
@@ -257,7 +271,6 @@ pub fn build(b: *Build) !void {
|
||||
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
|
||||
.enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
|
||||
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
|
||||
.enable_fuzzilli = b.option(bool, "enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false,
|
||||
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
|
||||
.use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
|
||||
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
|
||||
@@ -277,16 +290,14 @@ pub fn build(b: *Build) !void {
|
||||
var o = build_options;
|
||||
var unit_tests = b.addTest(.{
|
||||
.name = "bun-test",
|
||||
.optimize = build_options.optimize,
|
||||
.root_source_file = b.path("src/unit_test.zig"),
|
||||
.test_runner = .{ .path = b.path("src/main_test.zig"), .mode = .simple },
|
||||
.root_module = b.createModule(.{
|
||||
.optimize = build_options.optimize,
|
||||
.root_source_file = b.path("src/unit_test.zig"),
|
||||
.target = build_options.target,
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false,
|
||||
}),
|
||||
.target = build_options.target,
|
||||
.use_llvm = !build_options.no_llvm,
|
||||
.use_lld = if (build_options.os == .mac) false else !build_options.no_llvm,
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false,
|
||||
});
|
||||
configureObj(b, &o, unit_tests);
|
||||
// Setting `linker_allow_shlib_undefined` causes the linker to ignore
|
||||
@@ -320,7 +331,6 @@ pub fn build(b: *Build) !void {
|
||||
var step = b.step("check", "Check for semantic analysis errors");
|
||||
var bun_check_obj = addBunObject(b, &build_options);
|
||||
bun_check_obj.generated_bin = null;
|
||||
// bun_check_obj.use_llvm = false;
|
||||
step.dependOn(&bun_check_obj.step);
|
||||
|
||||
// The default install step will run zig build check. This is so ZLS
|
||||
@@ -493,7 +503,6 @@ fn addMultiCheck(
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
.enable_asan = root_build_options.enable_asan,
|
||||
.enable_valgrind = root_build_options.enable_valgrind,
|
||||
.enable_fuzzilli = root_build_options.enable_fuzzilli,
|
||||
.use_mimalloc = root_build_options.use_mimalloc,
|
||||
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
|
||||
};
|
||||
@@ -607,22 +616,15 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
obj.llvm_codegen_threads = opts.llvm_codegen_threads orelse 0;
|
||||
}
|
||||
|
||||
obj.no_link_obj = opts.os != .windows;
|
||||
|
||||
obj.no_link_obj = true;
|
||||
|
||||
if (opts.enable_asan and !enableFastBuild(b)) {
|
||||
if (@hasField(Build.Module, "sanitize_address")) {
|
||||
if (opts.enable_fuzzilli) {
|
||||
obj.sanitize_coverage_trace_pc_guard = true;
|
||||
}
|
||||
obj.root_module.sanitize_address = true;
|
||||
} else {
|
||||
const fail_step = b.addFail("asan is not supported on this platform");
|
||||
obj.step.dependOn(&fail_step.step);
|
||||
}
|
||||
} else if (opts.enable_fuzzilli) {
|
||||
const fail_step = b.addFail("fuzzilli requires asan");
|
||||
obj.step.dependOn(&fail_step.step);
|
||||
}
|
||||
obj.bundle_compiler_rt = false;
|
||||
obj.bundle_ubsan_rt = false;
|
||||
@@ -777,13 +779,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
mod.addImport("cpp", cppImport);
|
||||
cppImport.addImport("bun", mod);
|
||||
}
|
||||
{
|
||||
const ciInfoImport = b.createModule(.{
|
||||
.root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "ci_info.zig"),
|
||||
});
|
||||
mod.addImport("ci_info", ciInfoImport);
|
||||
ciInfoImport.addImport("bun", mod);
|
||||
}
|
||||
inline for (.{
|
||||
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
|
||||
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
|
||||
@@ -809,7 +804,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
fn propagateImports(source_mod: *Module) !void {
|
||||
var seen = std.AutoHashMap(*Module, void).init(source_mod.owner.graph.arena);
|
||||
defer seen.deinit();
|
||||
var queue = std.array_list.Managed(*Module).init(source_mod.owner.graph.arena);
|
||||
var queue = std.ArrayList(*Module).init(source_mod.owner.graph.arena);
|
||||
defer queue.deinit();
|
||||
try queue.appendSlice(source_mod.import_table.values());
|
||||
while (queue.pop()) |mod| {
|
||||
|
||||
51
bun.lock
51
bun.lock
@@ -1,6 +1,5 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bun",
|
||||
@@ -32,6 +31,12 @@
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/react": "^19",
|
||||
},
|
||||
},
|
||||
},
|
||||
"overrides": {
|
||||
@@ -85,13 +90,13 @@
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
|
||||
|
||||
"@lezer/common": ["@lezer/common@1.3.0", "", {}, "sha512-L9X8uHCYU310o99L3/MpJKYxPzXPOS7S0NmBaM7UO/x2Kb2WbmMLSkfvdr1KxRIFYOpbY0Jhn7CfLSUDzL8arQ=="],
|
||||
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
|
||||
|
||||
"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],
|
||||
|
||||
"@lezer/highlight": ["@lezer/highlight@1.2.3", "", { "dependencies": { "@lezer/common": "^1.3.0" } }, "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g=="],
|
||||
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
|
||||
|
||||
"@lezer/lr": ["@lezer/lr@1.4.3", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-yenN5SqAxAPv/qMnpWW0AT7l+SxVrgG+u0tNsRQWqbrz66HIl8DnEbBObvy21J5K7+I1v7gsAnlE2VQ5yYVSeA=="],
|
||||
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
|
||||
|
||||
"@octokit/app": ["@octokit/app@14.1.0", "", { "dependencies": { "@octokit/auth-app": "^6.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-rest": "^9.0.0", "@octokit/types": "^12.0.0", "@octokit/webhooks": "^12.0.4" } }, "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw=="],
|
||||
|
||||
@@ -145,7 +150,7 @@
|
||||
|
||||
"@sentry/types": ["@sentry/types@7.120.4", "", {}, "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q=="],
|
||||
|
||||
"@types/aws-lambda": ["@types/aws-lambda@8.10.159", "", {}, "sha512-SAP22WSGNN12OQ8PlCzGzRCZ7QDCwI85dQZbmpz7+mAk+L7j+wI7qnvmdKh+o7A5LaOp6QnOZ2NJphAZQTTHQg=="],
|
||||
"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],
|
||||
|
||||
"@types/btoa-lite": ["@types/btoa-lite@1.0.2", "", {}, "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg=="],
|
||||
|
||||
@@ -155,7 +160,9 @@
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="],
|
||||
"@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-EhBeSYX0Y6ye8pNebpKrwFJq7BoQ8J5SO6NlvNwwHjSj6adXJViPQrKlsyPw7hLBLvckEMO1yxeGdR82YBBlDg=="],
|
||||
|
||||
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
|
||||
|
||||
@@ -185,9 +192,11 @@
|
||||
|
||||
"constant-case": ["constant-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case": "^2.0.2" } }, "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
"detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="],
|
||||
|
||||
"dot-case": ["dot-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w=="],
|
||||
|
||||
@@ -211,29 +220,27 @@
|
||||
|
||||
"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
|
||||
|
||||
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
|
||||
"lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="],
|
||||
|
||||
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="],
|
||||
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="],
|
||||
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="],
|
||||
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="],
|
||||
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="],
|
||||
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="],
|
||||
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="],
|
||||
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="],
|
||||
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="],
|
||||
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
|
||||
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="],
|
||||
|
||||
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
|
||||
|
||||
@@ -289,7 +296,7 @@
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
|
||||
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"sentence-case": ["sentence-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg=="],
|
||||
|
||||
@@ -305,7 +312,7 @@
|
||||
|
||||
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
|
||||
|
||||
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
|
||||
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
|
||||
|
||||
"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],
|
||||
|
||||
|
||||
@@ -10,4 +10,4 @@ preload = "./test/preload.ts"
|
||||
|
||||
[install]
|
||||
linker = "isolated"
|
||||
minimumReleaseAge = 259200 # three days
|
||||
minimumReleaseAge = 1
|
||||
|
||||
@@ -51,23 +51,6 @@ if(ENABLE_ASAN)
|
||||
)
|
||||
endif()
|
||||
|
||||
if(ENABLE_FUZZILLI)
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable coverage instrumentation for fuzzing"
|
||||
-fsanitize-coverage=trace-pc-guard
|
||||
)
|
||||
|
||||
register_linker_flags(
|
||||
DESCRIPTION "Link coverage instrumentation"
|
||||
-fsanitize-coverage=trace-pc-guard
|
||||
)
|
||||
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Enable fuzzilli-specific code"
|
||||
-DFUZZILLI_ENABLED
|
||||
)
|
||||
endif()
|
||||
|
||||
# --- Optimization level ---
|
||||
if(DEBUG)
|
||||
register_compiler_flags(
|
||||
|
||||
@@ -125,8 +125,7 @@ setx(CWD ${CMAKE_SOURCE_DIR})
|
||||
setx(BUILD_PATH ${CMAKE_BINARY_DIR})
|
||||
|
||||
optionx(CACHE_PATH FILEPATH "The path to the cache directory" DEFAULT ${BUILD_PATH}/cache)
|
||||
optionx(CACHE_STRATEGY "auto|distributed|local|none" "The strategy to use for caching" DEFAULT
|
||||
"auto")
|
||||
optionx(CACHE_STRATEGY "read-write|read-only|none" "The strategy to use for caching" DEFAULT "read-write")
|
||||
|
||||
optionx(CI BOOL "If CI is enabled" DEFAULT OFF)
|
||||
optionx(ENABLE_ANALYSIS BOOL "If static analysis targets should be enabled" DEFAULT OFF)
|
||||
@@ -142,39 +141,9 @@ optionx(TMP_PATH FILEPATH "The path to the temporary directory" DEFAULT ${BUILD_
|
||||
|
||||
# --- Helper functions ---
|
||||
|
||||
# list_filter_out_regex()
|
||||
#
|
||||
# Description:
|
||||
# Filters out elements from a list that match a regex pattern.
|
||||
#
|
||||
# Arguments:
|
||||
# list - The list of strings to traverse
|
||||
# pattern - The regex pattern to filter out
|
||||
# touched - A variable to set if any items were removed
|
||||
function(list_filter_out_regex list pattern touched)
|
||||
set(result_list "${${list}}")
|
||||
set(keep_list)
|
||||
set(was_modified OFF)
|
||||
|
||||
foreach(line IN LISTS result_list)
|
||||
if(line MATCHES "${pattern}")
|
||||
set(was_modified ON)
|
||||
else()
|
||||
list(APPEND keep_list ${line})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
set(${list} "${keep_list}" PARENT_SCOPE)
|
||||
set(${touched} ${was_modified} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# setenv()
|
||||
# Description:
|
||||
# Sets an environment variable during the build step, and writes it to a .env file.
|
||||
#
|
||||
# See Also:
|
||||
# unsetenv()
|
||||
#
|
||||
# Arguments:
|
||||
# variable string - The variable to set
|
||||
# value string - The value to set the variable to
|
||||
@@ -187,7 +156,13 @@ function(setenv variable value)
|
||||
|
||||
if(EXISTS ${ENV_PATH})
|
||||
file(STRINGS ${ENV_PATH} ENV_FILE ENCODING UTF-8)
|
||||
list_filter_out_regex(ENV_FILE "^${variable}=" ENV_MODIFIED)
|
||||
|
||||
foreach(line ${ENV_FILE})
|
||||
if(line MATCHES "^${variable}=")
|
||||
list(REMOVE_ITEM ENV_FILE ${line})
|
||||
set(ENV_MODIFIED ON)
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
if(ENV_MODIFIED)
|
||||
list(APPEND ENV_FILE "${variable}=${value}")
|
||||
@@ -203,28 +178,6 @@ function(setenv variable value)
|
||||
message(STATUS "Set ENV ${variable}: ${value}")
|
||||
endfunction()
|
||||
|
||||
# See setenv()
|
||||
# Description:
|
||||
# Exact opposite of setenv().
|
||||
# Arguments:
|
||||
# variable string - The variable to unset.
|
||||
# See Also:
|
||||
# setenv()
|
||||
function(unsetenv variable)
|
||||
set(ENV_PATH ${BUILD_PATH}/.env)
|
||||
if(NOT EXISTS ${ENV_PATH})
|
||||
return()
|
||||
endif()
|
||||
|
||||
file(STRINGS ${ENV_PATH} ENV_FILE ENCODING UTF-8)
|
||||
list_filter_out_regex(ENV_FILE "^${variable}=" ENV_MODIFIED)
|
||||
|
||||
if(ENV_MODIFIED)
|
||||
list(JOIN ENV_FILE "\n" ENV_FILE)
|
||||
file(WRITE ${ENV_PATH} ${ENV_FILE})
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# satisfies_range()
|
||||
# Description:
|
||||
# Check if a version satisfies a version range or list of ranges
|
||||
|
||||
@@ -127,8 +127,6 @@ if (NOT ENABLE_ASAN)
|
||||
set(ENABLE_ZIG_ASAN OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_FUZZILLI BOOL "If fuzzilli support should be enabled" DEFAULT OFF)
|
||||
|
||||
if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN)
|
||||
set(DEFAULT_LTO ON)
|
||||
else()
|
||||
|
||||
@@ -34,6 +34,26 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(CLANG_FORMAT_CHANGED_SOURCES)
|
||||
foreach(source ${CLANG_FORMAT_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND CLANG_FORMAT_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(CLANG_FORMAT_CHANGED_SOURCES)
|
||||
set(CLANG_FORMAT_DIFF_COMMAND ${CLANG_FORMAT_PROGRAM}
|
||||
-i # edits files in-place
|
||||
--verbose
|
||||
${CLANG_FORMAT_CHANGED_SOURCES}
|
||||
)
|
||||
else()
|
||||
set(CLANG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-format")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
clang-format-diff
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
|
||||
|
||||
set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM}
|
||||
-p ${BUILD_PATH}
|
||||
-p ${BUILD_PATH}
|
||||
--config-file=${CWD}/.clang-tidy
|
||||
)
|
||||
|
||||
@@ -40,6 +40,27 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(CLANG_TIDY_CHANGED_SOURCES)
|
||||
foreach(source ${CLANG_TIDY_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND CLANG_TIDY_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(CLANG_TIDY_CHANGED_SOURCES)
|
||||
set(CLANG_TIDY_DIFF_COMMAND ${CLANG_TIDY_PROGRAM}
|
||||
${CLANG_TIDY_CHANGED_SOURCES}
|
||||
--fix
|
||||
--fix-errors
|
||||
--fix-notes
|
||||
)
|
||||
else()
|
||||
set(CLANG_TIDY_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-tidy")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
clang-tidy-diff
|
||||
|
||||
@@ -92,6 +92,26 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(PRETTIER_CHANGED_SOURCES)
|
||||
foreach(source ${PRETTIER_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND PRETTIER_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(PRETTIER_CHANGED_SOURCES)
|
||||
set(PRETTIER_DIFF_COMMAND ${PRETTIER_COMMAND}
|
||||
--write
|
||||
--plugin=prettier-plugin-organize-imports
|
||||
${PRETTIER_CHANGED_SOURCES}
|
||||
)
|
||||
else()
|
||||
set(PRETTIER_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for prettier")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
prettier-diff
|
||||
|
||||
@@ -25,6 +25,25 @@ register_command(
|
||||
ALWAYS_RUN
|
||||
)
|
||||
|
||||
if(GIT_CHANGED_SOURCES)
|
||||
set(ZIG_FORMAT_CHANGED_SOURCES)
|
||||
foreach(source ${ZIG_FORMAT_SOURCES})
|
||||
list(FIND GIT_CHANGED_SOURCES ${source} index)
|
||||
if(NOT ${index} EQUAL -1)
|
||||
list(APPEND ZIG_FORMAT_CHANGED_SOURCES ${source})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(ZIG_FORMAT_CHANGED_SOURCES)
|
||||
set(ZIG_FORMAT_DIFF_COMMAND ${ZIG_EXECUTABLE}
|
||||
fmt
|
||||
${ZIG_FORMAT_CHANGED_SOURCES}
|
||||
)
|
||||
else()
|
||||
set(ZIG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for zig-format")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
zig-format-diff
|
||||
|
||||
@@ -317,10 +317,6 @@ set(BUN_CPP_OUTPUTS
|
||||
${CODEGEN_PATH}/cpp.zig
|
||||
)
|
||||
|
||||
set(BUN_CI_INFO_OUTPUTS
|
||||
${CODEGEN_PATH}/ci_info.zig
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-cppbind
|
||||
@@ -338,21 +334,6 @@ register_command(
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-ci-info
|
||||
COMMENT
|
||||
"Generating CI info"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
${CWD}/src/codegen/ci_info.ts
|
||||
${CODEGEN_PATH}/ci_info.zig
|
||||
SOURCES
|
||||
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
|
||||
OUTPUTS
|
||||
${BUN_CI_INFO_OUTPUTS}
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-js-modules
|
||||
@@ -631,7 +612,6 @@ set(BUN_ZIG_GENERATED_SOURCES
|
||||
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
${BUN_CPP_OUTPUTS}
|
||||
${BUN_CI_INFO_OUTPUTS}
|
||||
${BUN_BINDGENV2_ZIG_OUTPUTS}
|
||||
)
|
||||
|
||||
@@ -695,7 +675,6 @@ register_command(
|
||||
-Dcpu=${ZIG_CPU}
|
||||
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
|
||||
-Denable_fuzzilli=$<IF:$<BOOL:${ENABLE_FUZZILLI}>,true,false>
|
||||
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
|
||||
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
|
||||
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
|
||||
|
||||
@@ -4,9 +4,41 @@ find_command(
|
||||
COMMAND
|
||||
git
|
||||
REQUIRED
|
||||
${CI}
|
||||
OFF
|
||||
)
|
||||
|
||||
if(NOT GIT_PROGRAM)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(GIT_DIFF_COMMAND ${GIT_PROGRAM} diff --no-color --name-only --diff-filter=AMCR origin/main HEAD)
|
||||
|
||||
execute_process(
|
||||
COMMAND
|
||||
${GIT_DIFF_COMMAND}
|
||||
WORKING_DIRECTORY
|
||||
${CWD}
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
OUTPUT_VARIABLE
|
||||
GIT_DIFF
|
||||
ERROR_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_VARIABLE
|
||||
GIT_DIFF_ERROR
|
||||
RESULT_VARIABLE
|
||||
GIT_DIFF_RESULT
|
||||
)
|
||||
|
||||
if(NOT GIT_DIFF_RESULT EQUAL 0)
|
||||
message(WARNING "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}")
|
||||
return()
|
||||
endif()
|
||||
|
||||
string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}")
|
||||
|
||||
if(CI)
|
||||
set(GIT_CHANGED_SOURCES "${GIT_CHANGED_SOURCES}")
|
||||
message(STATUS "Set GIT_CHANGED_SOURCES: ${GIT_CHANGED_SOURCES}")
|
||||
endif()
|
||||
|
||||
list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/)
|
||||
list(LENGTH GIT_CHANGED_SOURCES GIT_CHANGED_SOURCES_COUNT)
|
||||
|
||||
@@ -1,108 +1,60 @@
|
||||
# Setup sccache as the C and C++ compiler launcher to speed up builds by caching
|
||||
if(CACHE_STRATEGY STREQUAL "none")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(SCCACHE_SHARED_CACHE_REGION "us-west-1")
|
||||
set(SCCACHE_SHARED_CACHE_BUCKET "bun-build-sccache-store")
|
||||
|
||||
# Function to check if the system AWS credentials have access to the sccache S3 bucket.
|
||||
function(check_aws_credentials OUT_VAR)
|
||||
# Install dependencies first
|
||||
execute_process(
|
||||
COMMAND ${BUN_EXECUTABLE} install --frozen-lockfile
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/scripts/build-cache
|
||||
RESULT_VARIABLE INSTALL_EXIT_CODE
|
||||
OUTPUT_VARIABLE INSTALL_OUTPUT
|
||||
ERROR_VARIABLE INSTALL_ERROR
|
||||
)
|
||||
set(HAS_CREDENTIALS FALSE)
|
||||
|
||||
if(NOT INSTALL_EXIT_CODE EQUAL 0)
|
||||
message(FATAL_ERROR "Failed to install dependencies in scripts/build-cache\n"
|
||||
"Exit code: ${INSTALL_EXIT_CODE}\n"
|
||||
"Output: ${INSTALL_OUTPUT}\n"
|
||||
"Error: ${INSTALL_ERROR}")
|
||||
if(DEFINED ENV{AWS_ACCESS_KEY_ID} AND DEFINED ENV{AWS_SECRET_ACCESS_KEY})
|
||||
set(HAS_CREDENTIALS TRUE)
|
||||
message(NOTICE
|
||||
"sccache: Using AWS credentials found in environment variables")
|
||||
endif()
|
||||
|
||||
# Check AWS credentials
|
||||
execute_process(
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
run
|
||||
have-access.ts
|
||||
--bucket ${SCCACHE_SHARED_CACHE_BUCKET}
|
||||
--region ${SCCACHE_SHARED_CACHE_REGION}
|
||||
WORKING_DIRECTORY
|
||||
${CMAKE_SOURCE_DIR}/scripts/build-cache
|
||||
RESULT_VARIABLE HAVE_ACCESS_EXIT_CODE
|
||||
)
|
||||
# Check for ~/.aws directory since sccache may use that.
|
||||
if(NOT HAS_CREDENTIALS)
|
||||
if(WIN32)
|
||||
set(AWS_CONFIG_DIR "$ENV{USERPROFILE}/.aws")
|
||||
else()
|
||||
set(AWS_CONFIG_DIR "$ENV{HOME}/.aws")
|
||||
endif()
|
||||
|
||||
if(HAVE_ACCESS_EXIT_CODE EQUAL 0)
|
||||
set(HAS_CREDENTIALS TRUE)
|
||||
else()
|
||||
set(HAS_CREDENTIALS FALSE)
|
||||
if(EXISTS "${AWS_CONFIG_DIR}/credentials")
|
||||
set(HAS_CREDENTIALS TRUE)
|
||||
message(NOTICE
|
||||
"sccache: Using AWS credentials found in ${AWS_CONFIG_DIR}/credentials")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(${OUT_VAR} ${HAS_CREDENTIALS} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# Configure sccache to use the local cache only.
|
||||
function(sccache_configure_local_filesystem)
|
||||
unsetenv(SCCACHE_BUCKET)
|
||||
unsetenv(SCCACHE_REGION)
|
||||
setenv(SCCACHE_DIR "${CACHE_PATH}/sccache")
|
||||
endfunction()
|
||||
function(check_running_in_ci OUT_VAR)
|
||||
set(IS_CI FALSE)
|
||||
|
||||
# Configure sccache to use the distributed cache (S3 + local).
|
||||
function(sccache_configure_distributed)
|
||||
setenv(SCCACHE_BUCKET "${SCCACHE_SHARED_CACHE_BUCKET}")
|
||||
setenv(SCCACHE_REGION "${SCCACHE_SHARED_CACHE_REGION}")
|
||||
setenv(SCCACHE_DIR "${CACHE_PATH}/sccache")
|
||||
endfunction()
|
||||
# Query EC2 instance metadata service to check if running on buildkite-agent
|
||||
# The IP address 169.254.169.254 is a well-known link-local address for querying EC2 instance
|
||||
# metdata:
|
||||
# https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
|
||||
execute_process(
|
||||
COMMAND curl -s -m 0.5 http://169.254.169.254/latest/meta-data/tags/instance/Service
|
||||
OUTPUT_VARIABLE METADATA_OUTPUT
|
||||
ERROR_VARIABLE METADATA_ERROR
|
||||
RESULT_VARIABLE METADATA_RESULT
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
)
|
||||
|
||||
function(sccache_configure_environment_ci)
|
||||
if(CACHE_STRATEGY STREQUAL "auto" OR CACHE_STRATEGY STREQUAL "distributed")
|
||||
check_aws_credentials(HAS_AWS_CREDENTIALS)
|
||||
if(HAS_AWS_CREDENTIALS)
|
||||
sccache_configure_distributed()
|
||||
message(NOTICE "sccache: Using distributed cache strategy.")
|
||||
else()
|
||||
message(FATAL_ERROR "CI CACHE_STRATEGY is set to '${CACHE_STRATEGY}', but no valid AWS "
|
||||
"credentials were found. Note that 'auto' requires AWS credentials to access the shared "
|
||||
"cache in CI.")
|
||||
endif()
|
||||
elseif(CACHE_STRATEGY STREQUAL "local")
|
||||
# We disallow this because we want our CI runs to always used the shared cache to accelerate
|
||||
# builds.
|
||||
# none, distributed and auto are all okay.
|
||||
#
|
||||
# If local is configured, it's as good as "none", so this is probably user error.
|
||||
message(FATAL_ERROR "CI CACHE_STRATEGY is set to 'local', which is not allowed.")
|
||||
# Check if the request succeeded and returned exactly "buildkite-agent"
|
||||
if(METADATA_RESULT EQUAL 0 AND METADATA_OUTPUT STREQUAL "buildkite-agent")
|
||||
set(IS_CI TRUE)
|
||||
endif()
|
||||
|
||||
set(${OUT_VAR} ${IS_CI} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
function(sccache_configure_environment_developer)
|
||||
# Local environments can use any strategy they like. S3 is set up in such a way so as to clean
|
||||
# itself from old entries automatically.
|
||||
if (CACHE_STRATEGY STREQUAL "auto" OR CACHE_STRATEGY STREQUAL "local")
|
||||
# In the local environment, we prioritize using the local cache. This is because sccache takes
|
||||
# into consideration the whole absolute path of the files being compiled, and it's very
|
||||
# unlikely users will have the same absolute paths on their local machines.
|
||||
sccache_configure_local_filesystem()
|
||||
message(NOTICE "sccache: Using local cache strategy.")
|
||||
elseif(CACHE_STRATEGY STREQUAL "distributed")
|
||||
check_aws_credentials(HAS_AWS_CREDENTIALS)
|
||||
if(HAS_AWS_CREDENTIALS)
|
||||
sccache_configure_distributed()
|
||||
message(NOTICE "sccache: Using distributed cache strategy.")
|
||||
else()
|
||||
message(FATAL_ERROR "CACHE_STRATEGY is set to 'distributed', but no valid AWS credentials "
|
||||
"were found.")
|
||||
endif()
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
find_command(VARIABLE SCCACHE_PROGRAM COMMAND sccache REQUIRED ${CI})
|
||||
check_running_in_ci(IS_IN_CI)
|
||||
find_command(VARIABLE SCCACHE_PROGRAM COMMAND sccache REQUIRED ${IS_IN_CI})
|
||||
if(NOT SCCACHE_PROGRAM)
|
||||
message(WARNING "sccache not found. Your builds will be slower.")
|
||||
return()
|
||||
@@ -114,10 +66,25 @@ foreach(arg ${SCCACHE_ARGS})
|
||||
list(APPEND CMAKE_ARGS -D${arg}=${${arg}})
|
||||
endforeach()
|
||||
|
||||
# Configure S3 bucket for distributed caching
|
||||
setenv(SCCACHE_BUCKET "bun-build-sccache-store")
|
||||
setenv(SCCACHE_REGION "us-west-1")
|
||||
setenv(SCCACHE_DIR "${CACHE_PATH}/sccache")
|
||||
|
||||
# Handle credentials based on cache strategy
|
||||
if (CACHE_STRATEGY STREQUAL "read-only")
|
||||
setenv(SCCACHE_S3_NO_CREDENTIALS "1")
|
||||
message(STATUS "sccache configured in read-only mode.")
|
||||
else()
|
||||
# Check for AWS credentials and enable anonymous access if needed
|
||||
check_aws_credentials(HAS_AWS_CREDENTIALS)
|
||||
if(NOT IS_IN_CI AND NOT HAS_AWS_CREDENTIALS)
|
||||
setenv(SCCACHE_S3_NO_CREDENTIALS "1")
|
||||
message(NOTICE "sccache: No AWS credentials found, enabling anonymous S3 "
|
||||
"access. Writing to the cache will be disabled.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
setenv(SCCACHE_LOG "info")
|
||||
|
||||
if (CI)
|
||||
sccache_configure_environment_ci()
|
||||
else()
|
||||
sccache_configure_environment_developer()
|
||||
endif()
|
||||
message(STATUS "sccache configured for bun-build-sccache-store (us-west-1).")
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "c1423ff3fc7064635773a4a4616c5bf986eb00fe")
|
||||
set(ZIG_COMMIT "55fdbfa0c86be86b68d43a4ba761e6909eb0d7b2")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
@@ -55,7 +55,13 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF
|
||||
optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to local the zig cache directory" DEFAULT ${CACHE_PATH}/zig/local)
|
||||
optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global)
|
||||
|
||||
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler." DEFAULT ${CI})
|
||||
if(CI)
|
||||
set(ZIG_COMPILER_SAFE_DEFAULT ON)
|
||||
else()
|
||||
set(ZIG_COMPILER_SAFE_DEFAULT OFF)
|
||||
endif()
|
||||
|
||||
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler." DEFAULT ${ZIG_COMPILER_SAFE_DEFAULT})
|
||||
|
||||
setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR})
|
||||
setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR})
|
||||
|
||||
@@ -34,7 +34,7 @@ By default, Bun's CSS bundler targets the following browsers:
|
||||
|
||||
The CSS Nesting specification allows you to write more concise and intuitive stylesheets by nesting selectors inside one another. Instead of repeating parent selectors across your CSS file, you can write child styles directly within their parent blocks.
|
||||
|
||||
```scss title="styles.css" icon="file-code"
|
||||
```css title="styles.css" icon="file-code"
|
||||
/* With nesting */
|
||||
.card {
|
||||
background: white;
|
||||
@@ -72,7 +72,7 @@ Bun's CSS bundler automatically converts this nested syntax into traditional fla
|
||||
|
||||
You can also nest media queries and other at-rules inside selectors, eliminating the need to repeat selector patterns:
|
||||
|
||||
```scss title="styles.css" icon="file-code"
|
||||
```css title="styles.css" icon="file-code"
|
||||
.responsive-element {
|
||||
display: block;
|
||||
|
||||
@@ -100,7 +100,7 @@ This compiles to:
|
||||
|
||||
The `color-mix()` function gives you an easy way to blend two colors together according to a specified ratio in a chosen color space. This powerful feature lets you create color variations without manually calculating the resulting values.
|
||||
|
||||
```scss title="styles.css" icon="file-code"
|
||||
```css title="styles.css" icon="file-code"
|
||||
.button {
|
||||
/* Mix blue and red in the RGB color space with a 30/70 proportion */
|
||||
background-color: color-mix(in srgb, blue 30%, red);
|
||||
|
||||
@@ -231,67 +231,23 @@ const myPlugin: BunPlugin = {
|
||||
### onResolve
|
||||
|
||||
<Tabs>
|
||||
<Tab title="options">
|
||||
|
||||
- 🟢 `filter`
|
||||
- 🟢 `namespace`
|
||||
|
||||
</Tab>
|
||||
<Tab title="options">- 🟢 `filter` - 🟢 `namespace`</Tab>
|
||||
<Tab title="arguments">
|
||||
|
||||
- 🟢 `path`
|
||||
- 🟢 `importer`
|
||||
- 🔴 `namespace`
|
||||
- 🔴 `resolveDir`
|
||||
- 🔴 `kind`
|
||||
- 🔴 `pluginData`
|
||||
|
||||
- 🟢 `path` - 🟢 `importer` - 🔴 `namespace` - 🔴 `resolveDir` - 🔴 `kind` - 🔴 `pluginData`
|
||||
</Tab>
|
||||
<Tab title="results">
|
||||
|
||||
- 🟢 `namespace`
|
||||
- 🟢 `path`
|
||||
- 🔴 `errors`
|
||||
- 🔴 `external`
|
||||
- 🔴 `pluginData`
|
||||
- 🔴 `pluginName`
|
||||
- 🔴 `sideEffects`
|
||||
- 🔴 `suffix`
|
||||
- 🔴 `warnings`
|
||||
- 🔴 `watchDirs`
|
||||
- 🔴 `watchFiles`
|
||||
|
||||
- 🟢 `namespace` - 🟢 `path` - 🔴 `errors` - 🔴 `external` - 🔴 `pluginData` - 🔴 `pluginName` - 🔴 `sideEffects` -
|
||||
🔴 `suffix` - 🔴 `warnings` - 🔴 `watchDirs` - 🔴 `watchFiles`
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
### onLoad
|
||||
|
||||
<Tabs>
|
||||
<Tab title="options">
|
||||
|
||||
- 🟢 `filter`
|
||||
- 🟢 `namespace`
|
||||
|
||||
</Tab>
|
||||
<Tab title="arguments">
|
||||
|
||||
- 🟢 `path`
|
||||
- 🔴 `namespace`
|
||||
- 🔴 `suffix`
|
||||
- 🔴 `pluginData`
|
||||
|
||||
</Tab>
|
||||
<Tab title="options">- 🟢 `filter` - 🟢 `namespace`</Tab>
|
||||
<Tab title="arguments">- 🟢 `path` - 🔴 `namespace` - 🔴 `suffix` - 🔴 `pluginData`</Tab>
|
||||
<Tab title="results">
|
||||
|
||||
- 🟢 `contents`
|
||||
- 🟢 `loader`
|
||||
- 🔴 `errors`
|
||||
- 🔴 `pluginData`
|
||||
- 🔴 `pluginName`
|
||||
- 🔴 `resolveDir`
|
||||
- 🔴 `warnings`
|
||||
- 🔴 `watchDirs`
|
||||
- 🔴 `watchFiles`
|
||||
|
||||
- 🟢 `contents` - 🟢 `loader` - 🔴 `errors` - 🔴 `pluginData` - 🔴 `pluginName` - 🔴 `resolveDir` - 🔴 `warnings` -
|
||||
🔴 `watchDirs` - 🔴 `watchFiles`
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
@@ -90,7 +90,7 @@ The order of the `--target` flag does not matter, as long as they're delimited b
|
||||
| bun-linux-x64 | Linux | x64 | ✅ | ✅ | glibc |
|
||||
| bun-linux-arm64 | Linux | arm64 | ✅ | N/A | glibc |
|
||||
| bun-windows-x64 | Windows | x64 | ✅ | ✅ | - |
|
||||
| ~~bun-windows-arm64~~ | ~~Windows~~ | ~~arm64~~ | ❌ | ❌ | - |
|
||||
| ~~bun-windows-arm64~~ | Windows | arm64 | ❌ | ❌ | - |
|
||||
| bun-darwin-x64 | macOS | x64 | ✅ | ✅ | - |
|
||||
| bun-darwin-arm64 | macOS | arm64 | ✅ | N/A | - |
|
||||
| bun-linux-x64-musl | Linux | x64 | ✅ | ✅ | musl |
|
||||
@@ -118,7 +118,7 @@ These constants are embedded directly into your compiled binary at build time, p
|
||||
|
||||
<Note>
|
||||
For comprehensive examples and advanced patterns, see the [Build-time constants
|
||||
guide](/guides/runtime/build-time-constants).
|
||||
guide](https://bun.com/guides/runtime/build-time-constants).
|
||||
</Note>
|
||||
|
||||
---
|
||||
@@ -154,8 +154,8 @@ Using bytecode compilation, `tsc` starts 2x faster:
|
||||
Bytecode compilation moves parsing overhead for large input files from runtime to bundle time. Your app starts faster, in exchange for making the `bun build` command a little slower. It doesn't obscure source code.
|
||||
|
||||
<Warning>
|
||||
**Experimental:** Bytecode compilation is an experimental feature. Only `cjs` format is supported (which means no
|
||||
top-level-await). Let us know if you run into any issues!
|
||||
**Experimental:** Bytecode compilation is an experimental feature introduced in Bun v1.1.30. Only `cjs` format is
|
||||
supported (which means no top-level-await). Let us know if you run into any issues!
|
||||
</Warning>
|
||||
|
||||
### What do these flags do?
|
||||
@@ -183,35 +183,6 @@ console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"]
|
||||
|
||||
---
|
||||
|
||||
## Disabling automatic config loading
|
||||
|
||||
By default, standalone executables look for `.env` and `bunfig.toml` files in the directory where the executable is run. You can disable this behavior at build time for deterministic execution regardless of the user's working directory.
|
||||
|
||||
```bash icon="terminal" terminal
|
||||
# Disable .env loading
|
||||
bun build --compile --no-compile-autoload-dotenv ./app.ts --outfile myapp
|
||||
|
||||
# Disable bunfig.toml loading
|
||||
bun build --compile --no-compile-autoload-bunfig ./app.ts --outfile myapp
|
||||
|
||||
# Disable both
|
||||
bun build --compile --no-compile-autoload-dotenv --no-compile-autoload-bunfig ./app.ts --outfile myapp
|
||||
```
|
||||
|
||||
You can also configure this via the JavaScript API:
|
||||
|
||||
```ts
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
compile: {
|
||||
autoloadDotenv: false, // Disable .env loading
|
||||
autoloadBunfig: false, // Disable bunfig.toml loading
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Act as the Bun CLI
|
||||
|
||||
<Note>New in Bun v1.2.16</Note>
|
||||
@@ -245,7 +216,7 @@ However, with the `BUN_BE_BUN=1` environment variable, it acts just like the `bu
|
||||
|
||||
```bash icon="terminal" terminal
|
||||
# With the env var, the executable acts like the `bun` CLI
|
||||
BUN_BE_BUN=1 ./such-bun install
|
||||
bun_BE_BUN=1 ./such-bun install
|
||||
```
|
||||
|
||||
```txt
|
||||
@@ -288,12 +259,12 @@ console.log(`Server running at http://localhost:${server.port}`);
|
||||
</head>
|
||||
<body>
|
||||
<h1>Hello World</h1>
|
||||
<script src="./app.ts"></script>
|
||||
<script src="./app.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
```ts app.ts icon="file-code"
|
||||
```ts app.js icon="file-code"
|
||||
console.log("Hello from the client!");
|
||||
```
|
||||
|
||||
@@ -349,7 +320,7 @@ new Worker(new URL("./my-worker.ts", import.meta.url));
|
||||
new Worker(new URL("./my-worker.ts", import.meta.url).href);
|
||||
```
|
||||
|
||||
When you add multiple entrypoints to a standalone executable, they will be bundled separately into the executable.
|
||||
As of Bun v1.1.25, when you add multiple entrypoints to a standalone executable, they will be bundled separately into the executable.
|
||||
|
||||
In the future, we may automatically detect usages of statically-known paths in `new Worker(path)` and then bundle those into the executable, but for now, you'll need to add it to the shell command manually like the above example.
|
||||
|
||||
@@ -424,7 +395,7 @@ This database is read-write, but all changes are lost when the executable exits
|
||||
|
||||
### Embed N-API Addons
|
||||
|
||||
You can embed `.node` files into executables.
|
||||
As of Bun v1.0.23, you can embed `.node` files into executables.
|
||||
|
||||
```ts index.ts icon="/icons/typescript.svg"
|
||||
const addon = require("./addon.node");
|
||||
@@ -553,46 +524,12 @@ codesign -vvv --verify ./myapp
|
||||
|
||||
---
|
||||
|
||||
## Code splitting
|
||||
|
||||
Standalone executables support code splitting. Use `--compile` with `--splitting` to create an executable that loads code-split chunks at runtime.
|
||||
|
||||
```bash
|
||||
bun build --compile --splitting ./src/entry.ts --outdir ./build
|
||||
```
|
||||
|
||||
<CodeGroup>
|
||||
|
||||
```ts src/entry.ts icon="/icons/typescript.svg"
|
||||
console.log("Entrypoint loaded");
|
||||
const lazy = await import("./lazy.ts");
|
||||
lazy.hello();
|
||||
```
|
||||
|
||||
```ts src/lazy.ts icon="/icons/typescript.svg"
|
||||
export function hello() {
|
||||
console.log("Lazy module loaded");
|
||||
}
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
./build/entry
|
||||
```
|
||||
|
||||
```txt
|
||||
Entrypoint loaded
|
||||
Lazy module loaded
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Unsupported CLI arguments
|
||||
|
||||
Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:
|
||||
|
||||
- `--outdir` — use `outfile` instead (except when using with `--splitting`).
|
||||
- `--outdir` — use `outfile` instead.
|
||||
- `--splitting`
|
||||
- `--public-path`
|
||||
- `--target=node` or `--target=browser`
|
||||
- `--no-bundle` - we always bundle everything into the executable.
|
||||
|
||||
@@ -632,7 +632,7 @@ const server = serve({
|
||||
console.log(`🚀 Server running on ${server.url}`);
|
||||
```
|
||||
|
||||
```html title="public/index.html" icon="file-code"
|
||||
```html title="public/index.html"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
@@ -757,7 +757,7 @@ export function App() {
|
||||
}
|
||||
```
|
||||
|
||||
```css title="src/styles.css" icon="file-code"
|
||||
```css title="src/styles.css"
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
@@ -999,7 +999,7 @@ CMD ["bun", "index.js"]
|
||||
|
||||
### Environment Variables
|
||||
|
||||
```ini title=".env.production" icon="file-code"
|
||||
```bash title=".env.production" icon="file-code"
|
||||
NODE_ENV=production
|
||||
PORT=3000
|
||||
DATABASE_URL=postgresql://user:pass@localhost:5432/myapp
|
||||
|
||||
@@ -9,7 +9,7 @@ Hot Module Replacement (HMR) allows you to update modules in a running applicati
|
||||
|
||||
## `import.meta.hot` API Reference
|
||||
|
||||
Bun implements a client-side HMR API modeled after [Vite's `import.meta.hot` API](https://vite.dev/guide/api-hmr). It can be checked for with `if (import.meta.hot)`, tree-shaking it in production.
|
||||
Bun implements a client-side HMR API modeled after [Vite's `import.meta.hot` API](https://vitejs.dev/guide/api-hmr.html). It can be checked for with `if (import.meta.hot)`, tree-shaking it in production.
|
||||
|
||||
```ts title="index.ts" icon="/icons/typescript.svg"
|
||||
if (import.meta.hot) {
|
||||
@@ -144,7 +144,7 @@ Indicates that multiple dependencies' modules can be accepted. This variant acce
|
||||
|
||||
`import.meta.hot.data` maintains state between module instances during hot replacement, enabling data transfer from previous to new versions. When `import.meta.hot.data` is written into, Bun will also mark this module as capable of self-accepting (equivalent of calling `import.meta.hot.accept()`).
|
||||
|
||||
```tsx title="index.tsx" icon="/icons/typescript.svg"
|
||||
```jsx title="index.ts" icon="/icons/typescript.svg"
|
||||
import { createRoot } from "react-dom/client";
|
||||
import { App } from "./app";
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ bun ./index.html
|
||||
```
|
||||
|
||||
```
|
||||
Bun v1.3.3
|
||||
Bun v1.2.20
|
||||
ready in 6.62ms
|
||||
→ http://localhost:3000/
|
||||
Press h + Enter to show shortcuts
|
||||
@@ -51,7 +51,7 @@ bun index.html
|
||||
```
|
||||
|
||||
```
|
||||
Bun v1.3.3
|
||||
Bun v1.2.20
|
||||
ready in 6.62ms
|
||||
→ http://localhost:3000/
|
||||
Press h + Enter to show shortcuts
|
||||
@@ -81,7 +81,7 @@ bun ./index.html ./about.html
|
||||
```
|
||||
|
||||
```txt
|
||||
Bun v1.3.3
|
||||
Bun v1.2.20
|
||||
ready in 6.62ms
|
||||
→ http://localhost:3000/
|
||||
Routes:
|
||||
@@ -104,7 +104,7 @@ bun ./**/*.html
|
||||
```
|
||||
|
||||
```
|
||||
Bun v1.3.3
|
||||
Bun v1.2.20
|
||||
ready in 6.62ms
|
||||
→ http://localhost:3000/
|
||||
Routes:
|
||||
@@ -122,7 +122,7 @@ bun ./index.html ./about/index.html ./about/foo/index.html
|
||||
```
|
||||
|
||||
```
|
||||
Bun v1.3.3
|
||||
Bun v1.2.20
|
||||
ready in 6.62ms
|
||||
→ http://localhost:3000/
|
||||
Routes:
|
||||
@@ -164,7 +164,7 @@ For example:
|
||||
}
|
||||
```
|
||||
|
||||
```css abc.css icon="file-code"
|
||||
```css abc.css
|
||||
body {
|
||||
background-color: red;
|
||||
}
|
||||
@@ -174,7 +174,7 @@ body {
|
||||
|
||||
This outputs:
|
||||
|
||||
```css styles.css icon="file-code"
|
||||
```css
|
||||
body {
|
||||
background-color: red;
|
||||
}
|
||||
@@ -237,27 +237,13 @@ Then, reference TailwindCSS in your HTML via `<link>` tag, `@import` in CSS, or
|
||||
|
||||
<Tabs>
|
||||
<Tab title="index.html">
|
||||
|
||||
```html title="index.html" icon="file-code"
|
||||
<!-- Reference TailwindCSS in your HTML -->
|
||||
{/* Reference TailwindCSS in your HTML */}
|
||||
<link rel="stylesheet" href="tailwindcss" />
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="styles.css">
|
||||
|
||||
```css title="styles.css" icon="file-code"
|
||||
@import "tailwindcss";
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="app.ts">
|
||||
|
||||
```ts title="app.ts" icon="/icons/typescript.svg"
|
||||
import "tailwindcss";
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="styles.css">```css title="styles.css" icon="file-code" @import "tailwindcss"; ```</Tab>
|
||||
<Tab title="app.ts">```ts title="app.ts" icon="/icons/typescript.svg" import "tailwindcss"; ```</Tab>
|
||||
</Tabs>
|
||||
|
||||
<Info>Only one of those are necessary, not all three.</Info>
|
||||
@@ -273,7 +259,7 @@ bun ./index.html --console
|
||||
```
|
||||
|
||||
```
|
||||
Bun v1.3.3
|
||||
Bun v1.2.20
|
||||
ready in 6.62ms
|
||||
→ http://localhost:3000/
|
||||
Press h + Enter to show shortcuts
|
||||
@@ -385,8 +371,7 @@ All paths are resolved relative to your HTML file, making it easy to organize yo
|
||||
- Need more configuration options for things like asset handling
|
||||
- Need a way to configure CORS, headers, etc.
|
||||
|
||||
{/* todo: find the correct link to link to as this 404's and there isn't any similar files */}
|
||||
{/* If you want to submit a PR, most of the code is [here](https://github.com/oven-sh/bun/blob/main/src/bun.js/api/bun/html-rewriter.ts). You could even copy paste that file into your project and use it as a starting point. */}
|
||||
If you want to submit a PR, most of the code is [here](https://github.com/oven-sh/bun/blob/main/src/bun.js/api/bun/html-rewriter.ts). You could even copy paste that file into your project and use it as a starting point.
|
||||
|
||||
</Warning>
|
||||
|
||||
|
||||
@@ -106,7 +106,7 @@ For each file specified in `entrypoints`, Bun will generate a new bundle. This b
|
||||
|
||||
The contents of `out/index.js` will look something like this:
|
||||
|
||||
```js title="out/index.js" icon="/icons/javascript.svg"
|
||||
```ts title="out/index.js" icon="/icons/javascript.svg"
|
||||
// out/index.js
|
||||
// ...
|
||||
// ~20k lines of code
|
||||
@@ -160,12 +160,8 @@ Like the Bun runtime, the bundler supports an array of file types out of the box
|
||||
| ----------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `.js` `.jsx` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` | Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms including dead code elimination and tree shaking. At the moment Bun does not attempt to down-convert syntax; if you use recently ECMAScript syntax, that will be reflected in the bundled code. |
|
||||
| `.json` | JSON files are parsed and inlined into the bundle as a JavaScript object.<br/><br/>`js<br/>import pkg from "./package.json";<br/>pkg.name; // => "my-package"<br/>` |
|
||||
| `.jsonc` | JSON with comments. Files are parsed and inlined into the bundle as a JavaScript object.<br/><br/>`js<br/>import config from "./config.jsonc";<br/>config.name; // => "my-config"<br/>` |
|
||||
| `.toml` | TOML files are parsed and inlined into the bundle as a JavaScript object.<br/><br/>`js<br/>import config from "./bunfig.toml";<br/>config.logLevel; // => "debug"<br/>` |
|
||||
| `.yaml` `.yml` | YAML files are parsed and inlined into the bundle as a JavaScript object.<br/><br/>`js<br/>import config from "./config.yaml";<br/>config.name; // => "my-app"<br/>` |
|
||||
| `.txt` | The contents of the text file are read and inlined into the bundle as a string.<br/><br/>`js<br/>import contents from "./file.txt";<br/>console.log(contents); // => "Hello, world!"<br/>` |
|
||||
| `.html` | HTML files are processed and any referenced assets (scripts, stylesheets, images) are bundled. |
|
||||
| `.css` | CSS files are bundled together into a single `.css` file in the output directory. |
|
||||
| `.node` `.wasm` | These files are supported by the Bun runtime, but during bundling they are treated as assets. |
|
||||
|
||||
### Assets
|
||||
@@ -527,7 +523,7 @@ Injects environment variables into the bundled output by converting `process.env
|
||||
|
||||
For the input below:
|
||||
|
||||
```js title="input.js" icon="/icons/javascript.svg"
|
||||
```ts title="input.js" icon="/icons/javascript.svg"
|
||||
// input.js
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.BAZ);
|
||||
@@ -535,7 +531,7 @@ console.log(process.env.BAZ);
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js title="output.js" icon="/icons/javascript.svg"
|
||||
```ts title="output.js" icon="/icons/javascript.svg"
|
||||
// output.js
|
||||
console.log("bar");
|
||||
console.log("123");
|
||||
@@ -580,7 +576,7 @@ console.log(process.env.BAZ);
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js title="output.js" icon="/icons/javascript.svg"
|
||||
```ts title="output.js" icon="/icons/javascript.svg"
|
||||
console.log(process.env.FOO);
|
||||
console.log("https://acme.com");
|
||||
console.log(process.env.BAZ);
|
||||
@@ -722,7 +718,7 @@ Normally, bundling `index.tsx` would generate a bundle containing the entire sou
|
||||
|
||||
The generated bundle will look something like this:
|
||||
|
||||
```js title="out/index.js" icon="/icons/javascript.svg"
|
||||
```ts title="out/index.js" icon="/icons/javascript.svg"
|
||||
import { z } from "zod";
|
||||
|
||||
// ...
|
||||
@@ -1026,7 +1022,7 @@ Setting `publicPath` will prefix all file paths with the specified value.
|
||||
|
||||
The output file would now look something like this.
|
||||
|
||||
```js title="out/index.js" icon="/icons/javascript.svg"
|
||||
```ts title="out/index.js" icon="/icons/javascript.svg"
|
||||
var logo = "https://cdn.example.com/logo-a7305bdef.svg";
|
||||
```
|
||||
|
||||
@@ -1356,12 +1352,10 @@ interface BuildConfig {
|
||||
* JSX configuration object for controlling JSX transform behavior
|
||||
*/
|
||||
jsx?: {
|
||||
runtime?: "automatic" | "classic";
|
||||
importSource?: string;
|
||||
factory?: string;
|
||||
fragment?: string;
|
||||
sideEffects?: boolean;
|
||||
development?: boolean;
|
||||
importSource?: string;
|
||||
runtime?: "automatic" | "classic";
|
||||
};
|
||||
naming?:
|
||||
| string
|
||||
@@ -1378,7 +1372,7 @@ interface BuildConfig {
|
||||
publicPath?: string;
|
||||
define?: Record<string, string>;
|
||||
loader?: { [k in string]: Loader };
|
||||
sourcemap?: "none" | "linked" | "inline" | "external" | boolean; // default: "none", true -> "inline"
|
||||
sourcemap?: "none" | "linked" | "inline" | "external" | "linked" | boolean; // default: "none", true -> "inline"
|
||||
/**
|
||||
* package.json `exports` conditions used when resolving imports
|
||||
*
|
||||
@@ -1445,20 +1439,13 @@ interface BuildConfig {
|
||||
drop?: string[];
|
||||
|
||||
/**
|
||||
* - When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
|
||||
* - When set to `false`, returns a {@link BuildOutput} with `{success: false}`
|
||||
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
|
||||
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
|
||||
*
|
||||
* @default true
|
||||
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
|
||||
* as most usage of `Bun.build` forgets to check for errors.
|
||||
*/
|
||||
throw?: boolean;
|
||||
|
||||
/**
|
||||
* Custom tsconfig.json file path to use for path resolution.
|
||||
* Equivalent to `--tsconfig-override` in the CLI.
|
||||
*/
|
||||
tsconfig?: string;
|
||||
|
||||
outdir?: string;
|
||||
}
|
||||
|
||||
interface BuildOutput {
|
||||
@@ -1475,21 +1462,7 @@ interface BuildArtifact extends Blob {
|
||||
sourcemap: BuildArtifact | null;
|
||||
}
|
||||
|
||||
type Loader =
|
||||
| "js"
|
||||
| "jsx"
|
||||
| "ts"
|
||||
| "tsx"
|
||||
| "css"
|
||||
| "json"
|
||||
| "jsonc"
|
||||
| "toml"
|
||||
| "yaml"
|
||||
| "text"
|
||||
| "file"
|
||||
| "napi"
|
||||
| "wasm"
|
||||
| "html";
|
||||
type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "file" | "napi" | "wasm" | "text";
|
||||
|
||||
interface BuildOutput {
|
||||
outputs: BuildArtifact[];
|
||||
|
||||
@@ -7,16 +7,14 @@ The Bun bundler implements a set of default loaders out of the box.
|
||||
|
||||
> As a rule of thumb: **the bundler and the runtime both support the same set of file types out of the box.**
|
||||
|
||||
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`
|
||||
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.txt` `.wasm` `.node` `.html`
|
||||
|
||||
Bun uses the file extension to determine which built-in loader should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building plugins that extend Bun with custom loaders.
|
||||
|
||||
You can explicitly specify which loader to use using the `'type'` import attribute.
|
||||
You can explicitly specify which loader to use using the `'loader'` import attribute.
|
||||
|
||||
```ts title="index.ts" icon="/icons/typescript.svg"
|
||||
import my_toml from "./my_file" with { type: "toml" };
|
||||
// or with dynamic imports
|
||||
const { default: my_toml } = await import("./my_file", { with: { type: "toml" } });
|
||||
import my_toml from "./my_file" with { loader: "toml" };
|
||||
```
|
||||
|
||||
## Built-in loaders
|
||||
@@ -87,7 +85,7 @@ If a `.json` file is passed as an entrypoint to the bundler, it will be converte
|
||||
}
|
||||
```
|
||||
|
||||
```js Output
|
||||
```ts Output
|
||||
export default {
|
||||
name: "John Doe",
|
||||
age: 35,
|
||||
@@ -99,32 +97,7 @@ export default {
|
||||
|
||||
---
|
||||
|
||||
### `jsonc`
|
||||
|
||||
**JSON with Comments loader.** Default for `.jsonc`.
|
||||
|
||||
JSONC (JSON with Comments) files can be directly imported. Bun will parse them, stripping out comments and trailing commas.
|
||||
|
||||
```js
|
||||
import config from "./config.jsonc";
|
||||
console.log(config);
|
||||
```
|
||||
|
||||
During bundling, the parsed JSONC is inlined into the bundle as a JavaScript object, identical to the `json` loader.
|
||||
|
||||
```js
|
||||
var config = {
|
||||
option: "value",
|
||||
};
|
||||
```
|
||||
|
||||
<Note>
|
||||
Bun automatically uses the `jsonc` loader for `tsconfig.json`, `jsconfig.json`, `package.json`, and `bun.lock` files.
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
### `toml`
|
||||
### toml
|
||||
|
||||
**TOML loader.** Default for `.toml`.
|
||||
|
||||
@@ -158,7 +131,7 @@ age = 35
|
||||
email = "johndoe@example.com"
|
||||
```
|
||||
|
||||
```js Output
|
||||
```ts Output
|
||||
export default {
|
||||
name: "John Doe",
|
||||
age: 35,
|
||||
@@ -170,53 +143,7 @@ export default {
|
||||
|
||||
---
|
||||
|
||||
### `yaml`
|
||||
|
||||
**YAML loader.** Default for `.yaml` and `.yml`.
|
||||
|
||||
YAML files can be directly imported. Bun will parse them with its fast native YAML parser.
|
||||
|
||||
```js
|
||||
import config from "./config.yaml";
|
||||
console.log(config);
|
||||
|
||||
// via import attribute:
|
||||
import data from "./data.txt" with { type: "yaml" };
|
||||
```
|
||||
|
||||
During bundling, the parsed YAML is inlined into the bundle as a JavaScript object.
|
||||
|
||||
```js
|
||||
var config = {
|
||||
name: "my-app",
|
||||
version: "1.0.0",
|
||||
// ...other fields
|
||||
};
|
||||
```
|
||||
|
||||
If a `.yaml` or `.yml` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.
|
||||
|
||||
<CodeGroup>
|
||||
|
||||
```yaml Input
|
||||
name: John Doe
|
||||
age: 35
|
||||
email: johndoe@example.com
|
||||
```
|
||||
|
||||
```js Output
|
||||
export default {
|
||||
name: "John Doe",
|
||||
age: 35,
|
||||
email: "johndoe@example.com",
|
||||
};
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
---
|
||||
|
||||
### `text`
|
||||
### text
|
||||
|
||||
**Text loader.** Default for `.txt`.
|
||||
|
||||
@@ -246,7 +173,7 @@ If a `.txt` file is passed as an entrypoint, it will be converted to a `.js` mod
|
||||
Hello, world!
|
||||
```
|
||||
|
||||
```js Output
|
||||
```ts Output
|
||||
export default "Hello, world!";
|
||||
```
|
||||
|
||||
@@ -254,7 +181,7 @@ export default "Hello, world!";
|
||||
|
||||
---
|
||||
|
||||
### `napi`
|
||||
### napi
|
||||
|
||||
**Native addon loader.** Default for `.node`.
|
||||
|
||||
@@ -269,7 +196,7 @@ console.log(addon);
|
||||
|
||||
---
|
||||
|
||||
### `sqlite`
|
||||
### sqlite
|
||||
|
||||
**SQLite loader.** Requires `with { "type": "sqlite" }` import attribute.
|
||||
|
||||
@@ -299,9 +226,7 @@ Otherwise, the database to embed is copied into the `outdir` with a hashed filen
|
||||
|
||||
---
|
||||
|
||||
### `html`
|
||||
|
||||
**HTML loader.** Default for `.html`.
|
||||
### html
|
||||
|
||||
The `html` loader processes HTML files and bundles any referenced assets. It will:
|
||||
|
||||
@@ -312,7 +237,7 @@ The `html` loader processes HTML files and bundles any referenced assets. It wil
|
||||
|
||||
For example, given this HTML file:
|
||||
|
||||
```html title="src/index.html" icon="file-code"
|
||||
```html title="src/index.html"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
@@ -325,7 +250,7 @@ For example, given this HTML file:
|
||||
|
||||
It will output a new HTML file with the bundled assets:
|
||||
|
||||
```html title="dist/index.html" icon="file-code"
|
||||
```html title="dist/index.html"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
@@ -376,27 +301,7 @@ The `html` loader behaves differently depending on how it's used:
|
||||
|
||||
---
|
||||
|
||||
### `css`
|
||||
|
||||
**CSS loader.** Default for `.css`.
|
||||
|
||||
CSS files can be directly imported. The bundler will parse and bundle CSS files, handling `@import` statements and `url()` references.
|
||||
|
||||
```js
|
||||
import "./styles.css";
|
||||
```
|
||||
|
||||
During bundling, all imported CSS files are bundled together into a single `.css` file in the output directory.
|
||||
|
||||
```css
|
||||
.my-class {
|
||||
background: url("./image.png");
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `sh`
|
||||
### sh
|
||||
|
||||
**Bun Shell loader.** Default for `.sh` files.
|
||||
|
||||
@@ -408,7 +313,7 @@ bun run ./script.sh
|
||||
|
||||
---
|
||||
|
||||
### `file`
|
||||
### file
|
||||
|
||||
**File loader.** Default for all unrecognized file types.
|
||||
|
||||
|
||||
@@ -87,7 +87,7 @@ macro();
|
||||
|
||||
When shipping a library containing a macro to npm or another package registry, use the `"macro"` export condition to provide a special version of your package exclusively for the macro environment.
|
||||
|
||||
```json title="package.json" icon="file-json"
|
||||
```json title="package.json" icon="file-code"
|
||||
{
|
||||
"name": "my-package",
|
||||
"exports": {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -42,21 +42,7 @@ type PluginBuilder = {
|
||||
config: BuildConfig;
|
||||
};
|
||||
|
||||
type Loader =
|
||||
| "js"
|
||||
| "jsx"
|
||||
| "ts"
|
||||
| "tsx"
|
||||
| "json"
|
||||
| "jsonc"
|
||||
| "toml"
|
||||
| "yaml"
|
||||
| "file"
|
||||
| "napi"
|
||||
| "wasm"
|
||||
| "text"
|
||||
| "css"
|
||||
| "html";
|
||||
type Loader = "js" | "jsx" | "ts" | "tsx" | "css" | "json" | "toml";
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -188,7 +188,7 @@
|
||||
{
|
||||
"group": "Publishing & Analysis",
|
||||
"icon": "upload",
|
||||
"pages": ["/pm/cli/publish", "/pm/cli/outdated", "/pm/cli/why", "/pm/cli/audit", "/pm/cli/info"]
|
||||
"pages": ["/pm/cli/publish", "/pm/cli/outdated", "/pm/cli/why", "/pm/cli/audit"]
|
||||
},
|
||||
{
|
||||
"group": "Workspace Management",
|
||||
@@ -298,14 +298,7 @@
|
||||
{
|
||||
"group": "Deployment",
|
||||
"icon": "rocket",
|
||||
"pages": [
|
||||
"/guides/deployment/vercel",
|
||||
"/guides/deployment/railway",
|
||||
"/guides/deployment/render",
|
||||
"/guides/deployment/aws-lambda",
|
||||
"/guides/deployment/digital-ocean",
|
||||
"/guides/deployment/google-cloud-run"
|
||||
]
|
||||
"pages": ["/guides/deployment/vercel", "/guides/deployment/railway", "/guides/deployment/render"]
|
||||
},
|
||||
{
|
||||
"group": "Runtime & Debugging",
|
||||
@@ -354,7 +347,7 @@
|
||||
"/guides/ecosystem/discordjs",
|
||||
"/guides/ecosystem/docker",
|
||||
"/guides/ecosystem/drizzle",
|
||||
"/guides/ecosystem/gel",
|
||||
"/guides/ecosystem/edgedb",
|
||||
"/guides/ecosystem/elysia",
|
||||
"/guides/ecosystem/express",
|
||||
"/guides/ecosystem/hono",
|
||||
@@ -369,15 +362,13 @@
|
||||
"/guides/ecosystem/qwik",
|
||||
"/guides/ecosystem/react",
|
||||
"/guides/ecosystem/remix",
|
||||
"/guides/ecosystem/tanstack-start",
|
||||
"/guides/ecosystem/sentry",
|
||||
"/guides/ecosystem/solidstart",
|
||||
"/guides/ecosystem/ssr-react",
|
||||
"/guides/ecosystem/stric",
|
||||
"/guides/ecosystem/sveltekit",
|
||||
"/guides/ecosystem/systemd",
|
||||
"/guides/ecosystem/vite",
|
||||
"/guides/ecosystem/upstash"
|
||||
"/guides/ecosystem/vite"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -464,7 +455,6 @@
|
||||
"/guides/test/update-snapshots",
|
||||
"/guides/test/coverage",
|
||||
"/guides/test/coverage-threshold",
|
||||
"/guides/test/concurrent-test-glob",
|
||||
"/guides/test/skip-tests",
|
||||
"/guides/test/todo-tests",
|
||||
"/guides/test/timeout",
|
||||
|
||||
@@ -4,9 +4,13 @@ description: Share feedback, bug reports, and feature requests
|
||||
mode: center
|
||||
---
|
||||
|
||||
import Feedback from "/snippets/cli/feedback.mdx";
|
||||
|
||||
Whether you've found a bug, have a performance issue, or just want to suggest an improvement, here's how you can open a helpful issue:
|
||||
|
||||
<Callout icon="discord">For general questions, please join our [Discord](https://bun.com/discord).</Callout>
|
||||
<Callout icon="discord">
|
||||
For general questions, please join our [Discord](https://discord.com/invite/CXdq2DP29u).
|
||||
</Callout>
|
||||
|
||||
## Reporting Issues
|
||||
|
||||
@@ -52,7 +56,9 @@ Whether you've found a bug, have a performance issue, or just want to suggest an
|
||||
<Note>
|
||||
- For MacOS and Linux: copy the output of `uname -mprs`
|
||||
- For Windows: copy the output of this command in the powershell console:
|
||||
`"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"`
|
||||
```powershell
|
||||
"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"
|
||||
```
|
||||
</Note>
|
||||
</Step>
|
||||
|
||||
@@ -73,3 +79,7 @@ echo "please document X" | bun feedback --email you@example.com
|
||||
```
|
||||
|
||||
You can provide feedback as text arguments, file paths, or piped input.
|
||||
|
||||
---
|
||||
|
||||
<Feedback />
|
||||
|
||||
@@ -26,4 +26,4 @@ const regularArr = Array.from(uintArr);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -23,4 +23,4 @@ blob.type; // => "application/octet-stream"
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -24,4 +24,4 @@ const nodeBuffer = Buffer.from(arrBuffer, 0, 16); // view first 16 bytes
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -14,4 +14,4 @@ const str = decoder.decode(buf);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -38,4 +38,4 @@ const arr = new Uint8Array(buffer, 0, 16); // view first 16 bytes
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Utils](/runtime/utils) for more useful utilities.
|
||||
See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities.
|
||||
|
||||
@@ -13,4 +13,4 @@ const buf = await blob.arrayBuffer();
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const arr = new DataView(await blob.arrayBuffer());
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const stream = await blob.stream();
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -14,4 +14,4 @@ const str = await blob.text();
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const arr = new Uint8Array(await blob.arrayBuffer());
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const arrBuf = nodeBuf.buffer;
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const blob = new Blob([buf]);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -40,4 +40,4 @@ const stream = blob.stream(1024);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -24,4 +24,4 @@ const str = buf.toString("utf8", 0, 5);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ buf instanceof Uint8Array; // => true
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -14,4 +14,4 @@ const str = decoder.decode(dv);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -24,4 +24,4 @@ arr.byteLength; // => 32
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -15,4 +15,4 @@ console.log(await blob.text());
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const buf = Buffer.from(arr);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -13,4 +13,4 @@ const dv = new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -40,4 +40,4 @@ const stream = blob.stream(1024);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -15,4 +15,4 @@ const str = decoder.decode(arr);
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Binary Data](/runtime/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
See [Docs > API > Binary Data](https://bun.com/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.
|
||||
|
||||
@@ -1,204 +0,0 @@
|
||||
---
|
||||
title: Deploy a Bun application on AWS Lambda
|
||||
sidebarTitle: Deploy on AWS Lambda
|
||||
mode: center
|
||||
---
|
||||
|
||||
[AWS Lambda](https://aws.amazon.com/lambda/) is a serverless compute service that lets you run code without provisioning or managing servers.
|
||||
|
||||
In this guide, we will deploy a Bun HTTP server to AWS Lambda using a `Dockerfile`.
|
||||
|
||||
<Note>
|
||||
Before continuing, make sure you have:
|
||||
|
||||
- A Bun application ready for deployment
|
||||
- An [AWS account](https://aws.amazon.com/)
|
||||
- [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html) installed and configured
|
||||
- [Docker](https://docs.docker.com/get-started/get-docker/) installed and added to your `PATH`
|
||||
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a new Dockerfile">
|
||||
Make sure you're in the directory containing your project, then create a new `Dockerfile` in the root of your project. This file contains the instructions to initialize the container, copy your local project files into it, install dependencies, and start the application.
|
||||
|
||||
```docker Dockerfile icon="docker"
|
||||
# Use the official AWS Lambda adapter image to handle the Lambda runtime
|
||||
FROM public.ecr.aws/awsguru/aws-lambda-adapter:0.9.0 AS aws-lambda-adapter
|
||||
|
||||
# Use the official Bun image to run the application
|
||||
FROM oven/bun:debian AS bun_latest
|
||||
|
||||
# Copy the Lambda adapter into the container
|
||||
COPY --from=aws-lambda-adapter /lambda-adapter /opt/extensions/lambda-adapter
|
||||
|
||||
# Set the port to 8080. This is required for the AWS Lambda adapter.
|
||||
ENV PORT=8080
|
||||
|
||||
# Set the work directory to `/var/task`. This is the default work directory for Lambda.
|
||||
WORKDIR "/var/task"
|
||||
|
||||
# Copy the package.json and bun.lock into the container
|
||||
COPY package.json bun.lock ./
|
||||
|
||||
# Install the dependencies
|
||||
RUN bun install --production --frozen-lockfile
|
||||
|
||||
# Copy the rest of the application into the container
|
||||
COPY . /var/task
|
||||
|
||||
# Run the application.
|
||||
CMD ["bun", "index.ts"]
|
||||
```
|
||||
|
||||
<Note>
|
||||
Make sure that the start command corresponds to your application's entry point. This can also be `CMD ["bun", "run", "start"]` if you have a start script in your `package.json`.
|
||||
|
||||
This image installs dependencies and runs your app with Bun inside a container. If your app doesn't have dependencies, you can omit the `RUN bun install --production --frozen-lockfile` line.
|
||||
</Note>
|
||||
|
||||
Create a new `.dockerignore` file in the root of your project. This file contains the files and directories that should be _excluded_ from the container image, such as `node_modules`. This makes your builds faster and smaller:
|
||||
|
||||
```docker .dockerignore icon="Docker"
|
||||
node_modules
|
||||
Dockerfile*
|
||||
.dockerignore
|
||||
.git
|
||||
.gitignore
|
||||
README.md
|
||||
LICENSE
|
||||
.vscode
|
||||
.env
|
||||
# Any other files or directories you want to exclude
|
||||
```
|
||||
</Step>
|
||||
<Step title="Build the Docker image">
|
||||
Make sure you're in the directory containing your `Dockerfile`, then build the Docker image. In this case, we'll call the image `bun-lambda-demo` and tag it as `latest`.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
# cd /path/to/your/app
|
||||
docker build --provenance=false --platform linux/amd64 -t bun-lambda-demo:latest .
|
||||
```
|
||||
</Step>
|
||||
<Step title="Create an ECR repository">
|
||||
To push the image to AWS Lambda, we first need to create an [ECR repository](https://aws.amazon.com/ecr/) to push the image to.
|
||||
|
||||
By running the following command, we:
|
||||
- Create an ECR repository named `bun-lambda-demo` in the `us-east-1` region
|
||||
- Get the repository URI and export it as an environment variable. This is optional, but makes the next steps easier.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
export ECR_URI=$(aws ecr create-repository --repository-name bun-lambda-demo --region us-east-1 --query 'repository.repositoryUri' --output text)
|
||||
echo $ECR_URI
|
||||
```
|
||||
```txt
|
||||
[id].dkr.ecr.us-east-1.amazonaws.com/bun-lambda-demo
|
||||
```
|
||||
|
||||
<Note>
|
||||
If you're using IAM Identity Center (SSO) or have configured AWS CLI with profiles, you'll need to add the `--profile` flag to your AWS CLI commands.
|
||||
|
||||
For example, if your profile is named `my-sso-app`, use `--profile my-sso-app`. Check your AWS CLI configuration with `aws configure list-profiles` to see available profiles.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
export ECR_URI=$(aws ecr create-repository --repository-name bun-lambda-demo --region us-east-1 --profile my-sso-app --query 'repository.repositoryUri' --output text)
|
||||
echo $ECR_URI
|
||||
```
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Authenticate with the ECR repository">
|
||||
Log in to the ECR repository:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $ECR_URI
|
||||
```
|
||||
```txt
|
||||
Login Succeeded
|
||||
```
|
||||
|
||||
<Note>
|
||||
If using a profile, use the `--profile` flag:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
aws ecr get-login-password --region us-east-1 --profile my-sso-app | docker login --username AWS --password-stdin $ECR_URI
|
||||
```
|
||||
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Tag and push the docker image to the ECR repository">
|
||||
Make sure you're in the directory containing your `Dockerfile`, then tag the docker image with the ECR repository URI.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
docker tag bun-lambda-demo:latest ${ECR_URI}:latest
|
||||
```
|
||||
|
||||
Then, push the image to the ECR repository.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
docker push ${ECR_URI}:latest
|
||||
```
|
||||
</Step>
|
||||
<Step title="Create an AWS Lambda function">
|
||||
Go to **AWS Console** > **Lambda** > [**Create Function**](https://us-east-1.console.aws.amazon.com/lambda/home?region=us-east-1#/create/function?intent=authorFromImage) > Select **Container image**
|
||||
|
||||
<Warning>Make sure you've selected the right region, this URL defaults to `us-east-1`.</Warning>
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
Give the function a name, like `my-bun-function`.
|
||||
</Step>
|
||||
<Step title="Select the container image">
|
||||
Then, go to the **Container image URI** section, click on **Browse images**. Select the image we just pushed to the ECR repository.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
Then, select the `latest` image, and click on **Select image**.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
<Step title="Configure the function">
|
||||
To get a public URL for the function, we need to go to **Additional configurations** > **Networking** > **Function URL**.
|
||||
|
||||
Set this to **Enable**, with Auth Type **NONE**.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
<Step title="Create the function">
|
||||
Click on **Create function** at the bottom of the page, this will create the function.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
</Step>
|
||||
<Step title="Get the function URL">
|
||||
Once the function has been created you'll be redirected to the function's page, where you can see the function URL in the **"Function URL"** section.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
<Step title="Test the function">
|
||||
🥳 Your app is now live! To test the function, you can either go to the **Test** tab, or call the function URL directly.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
curl -X GET https://[your-function-id].lambda-url.us-east-1.on.aws/
|
||||
```
|
||||
```txt
|
||||
Hello from Bun on Lambda!
|
||||
```
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
@@ -1,161 +0,0 @@
|
||||
---
|
||||
title: Deploy a Bun application on DigitalOcean
|
||||
sidebarTitle: Deploy on DigitalOcean
|
||||
mode: center
|
||||
---
|
||||
|
||||
[DigitalOcean](https://www.digitalocean.com/) is a cloud platform that provides a range of services for building and deploying applications.
|
||||
|
||||
In this guide, we will deploy a Bun HTTP server to DigitalOcean using a `Dockerfile`.
|
||||
|
||||
<Note>
|
||||
Before continuing, make sure you have:
|
||||
|
||||
- A Bun application ready for deployment
|
||||
- A [DigitalOcean account](https://www.digitalocean.com/)
|
||||
- [DigitalOcean CLI](https://docs.digitalocean.com/reference/doctl/how-to/install/#step-1-install-doctl) installed and configured
|
||||
- [Docker](https://docs.docker.com/get-started/get-docker/) installed and added to your `PATH`
|
||||
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a new DigitalOcean Container Registry">
|
||||
Create a new Container Registry to store the Docker image.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Through the DigitalOcean dashboard">
|
||||
In the DigitalOcean dashboard, go to [**Container Registry**](https://cloud.digitalocean.com/registry), and enter the details for the new registry.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
Make sure the details are correct, then click **Create Registry**.
|
||||
</Tab>
|
||||
<Tab title="Through the DigitalOcean CLI">
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
doctl registry create bun-digitalocean-demo
|
||||
```
|
||||
```txt
|
||||
Name Endpoint Region slug
|
||||
bun-digitalocean-demo registry.digitalocean.com/bun-digitalocean-demo sfo2
|
||||
```
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
You should see the new registry in the [**DigitalOcean registry dashboard**](https://cloud.digitalocean.com/registry):
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
<Step title="Create a new Dockerfile">
|
||||
Make sure you're in the directory containing your project, then create a new `Dockerfile` in the root of your project. This file contains the instructions to initialize the container, copy your local project files into it, install dependencies, and start the application.
|
||||
|
||||
```docker Dockerfile icon="docker"
|
||||
# Use the official Bun image to run the application
|
||||
FROM oven/bun:debian
|
||||
|
||||
# Set the work directory to `/app`
|
||||
WORKDIR /app
|
||||
|
||||
# Copy the package.json and bun.lock into the container
|
||||
COPY package.json bun.lock ./
|
||||
|
||||
# Install the dependencies
|
||||
RUN bun install --production --frozen-lockfile
|
||||
|
||||
# Copy the rest of the application into the container
|
||||
COPY . .
|
||||
|
||||
# Expose the port (DigitalOcean will set PORT env var)
|
||||
EXPOSE 8080
|
||||
|
||||
# Run the application
|
||||
CMD ["bun", "index.ts"]
|
||||
```
|
||||
|
||||
<Note>
|
||||
Make sure that the start command corresponds to your application's entry point. This can also be `CMD ["bun", "run", "start"]` if you have a start script in your `package.json`.
|
||||
|
||||
This image installs dependencies and runs your app with Bun inside a container. If your app doesn't have dependencies, you can omit the `RUN bun install --production --frozen-lockfile` line.
|
||||
</Note>
|
||||
|
||||
Create a new `.dockerignore` file in the root of your project. This file contains the files and directories that should be _excluded_ from the container image, such as `node_modules`. This makes your builds faster and smaller:
|
||||
|
||||
```docker .dockerignore icon="Docker"
|
||||
node_modules
|
||||
Dockerfile*
|
||||
.dockerignore
|
||||
.git
|
||||
.gitignore
|
||||
README.md
|
||||
LICENSE
|
||||
.vscode
|
||||
.env
|
||||
# Any other files or directories you want to exclude
|
||||
```
|
||||
</Step>
|
||||
<Step title="Authenticate Docker with DigitalOcean registry">
|
||||
Before building and pushing the Docker image, authenticate Docker with the DigitalOcean Container Registry:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
doctl registry login
|
||||
```
|
||||
```txt
|
||||
Successfully authenticated with registry.digitalocean.com
|
||||
```
|
||||
|
||||
<Note>
|
||||
This command authenticates Docker with DigitalOcean's registry using your DigitalOcean credentials. Without this step, the build and push command will fail with a 401 authentication error.
|
||||
</Note>
|
||||
</Step>
|
||||
<Step title="Build and push the Docker image to the DigitalOcean registry">
|
||||
Make sure you're in the directory containing your `Dockerfile`, then build and push the Docker image to the DigitalOcean registry in one command:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
docker buildx build --platform=linux/amd64 -t registry.digitalocean.com/bun-digitalocean-demo/bun-digitalocean-demo:latest --push .
|
||||
```
|
||||
|
||||
<Note>
|
||||
If you're building on an ARM Mac (M1/M2), you must use `docker buildx` with `--platform=linux/amd64` to ensure compatibility with DigitalOcean's infrastructure. Using `docker build` without the platform flag will create an ARM64 image that won't run on DigitalOcean.
|
||||
</Note>
|
||||
|
||||
Once the image is pushed, you should see it in the [**DigitalOcean registry dashboard**](https://cloud.digitalocean.com/registry):
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
<Step title="Create a new DigitalOcean App Platform project">
|
||||
In the DigitalOcean dashboard, go to [**App Platform**](https://cloud.digitalocean.com/apps) > **Create App**. We can create a project directly from the container image.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
Make sure the details are correct, then click **Next**.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
Review and configure resource settings, then click **Create app**.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
<Step title="Visit your live application">
|
||||
🥳 Your app is now live! Once the app is created, you should see it in the App Platform dashboard with the public URL.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
@@ -1,194 +0,0 @@
|
||||
---
|
||||
title: Deploy a Bun application on Google Cloud Run
|
||||
sidebarTitle: Deploy on Google Cloud Run
|
||||
mode: center
|
||||
---
|
||||
|
||||
[Google Cloud Run](https://cloud.google.com/run) is a managed platform for deploying and scaling serverless applications. Google handles the infrastructure for you.
|
||||
|
||||
In this guide, we will deploy a Bun HTTP server to Google Cloud Run using a `Dockerfile`.
|
||||
|
||||
<Note>
|
||||
Before continuing, make sure you have:
|
||||
|
||||
- A Bun application ready for deployment
|
||||
- A [Google Cloud account](https://cloud.google.com/) with billing enabled
|
||||
- [Google Cloud CLI](https://cloud.google.com/sdk/docs/install) installed and configured
|
||||
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
<Steps>
|
||||
<Step title={<span>Initialize <code>gcloud</code> by selecting or creating a project</span>}>
|
||||
|
||||
Make sure that you've initialized the Google Cloud CLI. This command logs you in, and prompts you to either select an existing project or create a new one.
|
||||
|
||||
For more help with the Google Cloud CLI, see the [official documentation](https://docs.cloud.google.com/sdk/gcloud/reference/init).
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
gcloud init
|
||||
```
|
||||
|
||||
```txt
|
||||
Welcome! This command will take you through the configuration of gcloud.
|
||||
|
||||
You must sign in to continue. Would you like to sign in (Y/n)? Y
|
||||
You are signed in as [email@example.com].
|
||||
|
||||
Pick cloud project to use:
|
||||
[1] existing-bun-app-1234
|
||||
[2] Enter a project ID
|
||||
[3] Create a new project
|
||||
Please enter numeric choice or text value (must exactly match list item): 3
|
||||
|
||||
Enter a Project ID. my-bun-app
|
||||
Your current project has been set to: [my-bun-app]
|
||||
|
||||
The Google Cloud CLI is configured and ready to use!
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Store your project info in environment variables">
|
||||
Set variables for your project ID and number so they're easier to reuse in the following steps.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
PROJECT_ID=$(gcloud projects list --format='value(projectId)' --filter='name="my bun app"')
|
||||
PROJECT_NUMBER=$(gcloud projects list --format='value(projectNumber)' --filter='name="my bun app"')
|
||||
|
||||
echo $PROJECT_ID $PROJECT_NUMBER
|
||||
```
|
||||
|
||||
```txt
|
||||
my-bun-app-... [PROJECT_NUMBER]
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Link a billing account">
|
||||
List your available billing accounts and link one to your project:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
gcloud billing accounts list
|
||||
```
|
||||
|
||||
```txt
|
||||
ACCOUNT_ID NAME OPEN MASTER_ACCOUNT_ID
|
||||
[BILLING_ACCOUNT_ID] My Billing Account True
|
||||
```
|
||||
|
||||
Link your billing account to your project. Replace `[BILLING_ACCOUNT_ID]` with the ID of your billing account.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
gcloud billing projects link $PROJECT_ID --billing-account=[BILLING_ACCOUNT_ID]
|
||||
```
|
||||
|
||||
```txt
|
||||
billingAccountName: billingAccounts/[BILLING_ACCOUNT_ID]
|
||||
billingEnabled: true
|
||||
name: projects/my-bun-app-.../billingInfo
|
||||
projectId: my-bun-app-...
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Enable APIs and configure IAM roles">
|
||||
Activate the necessary services and grant Cloud Build permissions:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
gcloud services enable run.googleapis.com cloudbuild.googleapis.com
|
||||
gcloud projects add-iam-policy-binding $PROJECT_ID \
|
||||
--member=serviceAccount:$PROJECT_NUMBER-compute@developer.gserviceaccount.com \
|
||||
--role=roles/run.builder
|
||||
```
|
||||
|
||||
<Note>
|
||||
These commands enable Cloud Run (`run.googleapis.com`) and Cloud Build (`cloudbuild.googleapis.com`), which are required for deploying from source. Cloud Run runs your containerized app, while Cloud Build handles building and packaging it.
|
||||
|
||||
The IAM binding grants the Compute Engine service account (`$PROJECT_NUMBER-compute@developer.gserviceaccount.com`) permission to build and deploy images on your behalf.
|
||||
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Add a Dockerfile">
|
||||
Create a new `Dockerfile` in the root of your project. This file contains the instructions to initialize the container, copy your local project files into it, install dependencies, and start the application.
|
||||
|
||||
```docker Dockerfile icon="docker"
|
||||
# Use the official Bun image to run the application
|
||||
FROM oven/bun:latest
|
||||
|
||||
# Copy the package.json and bun.lock into the container
|
||||
COPY package.json bun.lock ./
|
||||
|
||||
# Install the dependencies
|
||||
# Install the dependencies
|
||||
RUN bun install --production --frozen-lockfile
|
||||
|
||||
# Copy the rest of the application into the container
|
||||
COPY . .
|
||||
|
||||
# Run the application
|
||||
CMD ["bun", "index.ts"]
|
||||
```
|
||||
|
||||
<Note>
|
||||
Make sure that the start command corresponds to your application's entry point. This can also be `CMD ["bun", "run", "start"]` if you have a start script in your `package.json`.
|
||||
|
||||
This image installs dependencies and runs your app with Bun inside a container. If your app doesn't have dependencies, you can omit the `RUN bun install --production --frozen-lockfile` line.
|
||||
|
||||
</Note>
|
||||
|
||||
Create a new `.dockerignore` file in the root of your project. This file contains the files and directories that should be _excluded_ from the container image, such as `node_modules`. This makes your builds faster and smaller:
|
||||
|
||||
```docker .dockerignore icon="Docker"
|
||||
node_modules
|
||||
Dockerfile*
|
||||
.dockerignore
|
||||
.git
|
||||
.gitignore
|
||||
README.md
|
||||
LICENSE
|
||||
.vscode
|
||||
.env
|
||||
# Any other files or directories you want to exclude
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Deploy your service">
|
||||
Make sure you're in the directory containing your `Dockerfile`, then deploy directly from your local source:
|
||||
|
||||
<Note>
|
||||
Update the `--region` flag to your preferred region. You can also omit this flag to get an interactive prompt to
|
||||
select a region.
|
||||
</Note>
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
gcloud run deploy my-bun-app --source . --region=us-west1 --allow-unauthenticated
|
||||
```
|
||||
|
||||
```txt
|
||||
Deploying from source requires an Artifact Registry Docker repository to store built containers. A repository named
|
||||
[cloud-run-source-deploy] in region [us-west1] will be created.
|
||||
|
||||
Do you want to continue (Y/n)? Y
|
||||
|
||||
Building using Dockerfile and deploying container to Cloud Run service [my-bun-app] in project [my-bun-app-...] region [us-west1]
|
||||
✓ Building and deploying... Done.
|
||||
✓ Validating Service...
|
||||
✓ Uploading sources...
|
||||
✓ Building Container... Logs are available at [https://console.cloud.google.com/cloud-build/builds...].
|
||||
✓ Creating Revision...
|
||||
✓ Routing traffic...
|
||||
✓ Setting IAM Policy...
|
||||
Done.
|
||||
Service [my-bun-app] revision [my-bun-app-...] has been deployed and is serving 100 percent of traffic.
|
||||
Service URL: https://my-bun-app-....us-west1.run.app
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Visit your live application">
|
||||
|
||||
🎉 Your Bun application is now live!
|
||||
|
||||
Visit the Service URL (`https://my-bun-app-....us-west1.run.app`) to confirm everything works as expected.
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
@@ -4,6 +4,8 @@ sidebarTitle: Deploy on Vercel
|
||||
mode: center
|
||||
---
|
||||
|
||||
import { ProductCard } from "/snippets/product-card.mdx";
|
||||
|
||||
[Vercel](https://vercel.com/) is a cloud platform that lets you build, deploy, and scale your apps.
|
||||
|
||||
<Warning>
|
||||
@@ -30,7 +32,7 @@ mode: center
|
||||
|
||||
Vercel automatically detects this configuration and runs your application on Bun. The value has to be `"1.x"`, Vercel handles the minor version internally.
|
||||
|
||||
For best results, match your local Bun version with the version used by Vercel.
|
||||
For best results, match your local Bun version with the version used by Vercel. **Currently, Bun version `1.2.23` is supported**.
|
||||
</Step>
|
||||
|
||||
<Step title="Next.js configuration">
|
||||
@@ -79,7 +81,7 @@ mode: center
|
||||
console.log("runtime", process.versions.bun);
|
||||
```
|
||||
```txt
|
||||
runtime 1.3.3
|
||||
runtime 1.2.23
|
||||
```
|
||||
|
||||
[See the Vercel Bun Runtime documentation for feature support →](https://vercel.com/docs/functions/runtimes/bun#feature-support)
|
||||
|
||||
@@ -22,7 +22,7 @@ bun add discord.js
|
||||
|
||||
---
|
||||
|
||||
Before we go further, we need to go to the [Discord developer portal](https://discord.com/developers/applications), login/signup, create a new _Application_, then create a new _Bot_ within that application. Follow the [official guide](https://discordjs.guide/legacy/preparations/app-setup#creating-your-bot) for step-by-step instructions.
|
||||
Before we go further, we need to go to the [Discord developer portal](https://discord.com/developers/applications), login/signup, create a new _Application_, then create a new _Bot_ within that application. Follow the [official guide](https://discordjs.guide/preparations/setting-up-a-bot-application.html#creating-your-bot) for step-by-step instructions.
|
||||
|
||||
---
|
||||
|
||||
@@ -30,7 +30,7 @@ Once complete, you'll be presented with your bot's _private key_. Let's add this
|
||||
|
||||
<Note>This is an example token that has already been invalidated.</Note>
|
||||
|
||||
```ini .env.local icon="settings"
|
||||
```txt .env.local icon="settings"
|
||||
DISCORD_TOKEN=NzkyNzE1NDU0MTk2MDg4ODQy.X-hvzA.Ovy4MCQywSkoMRRclStW4xAYK7I
|
||||
```
|
||||
|
||||
|
||||
@@ -1,27 +1,23 @@
|
||||
---
|
||||
title: Use Gel with Bun
|
||||
sidebarTitle: Gel with Bun
|
||||
title: Use EdgeDB with Bun
|
||||
sidebarTitle: EdgeDB with Bun
|
||||
mode: center
|
||||
---
|
||||
|
||||
Gel (formerly EdgeDB) is a graph-relational database powered by Postgres under the hood. It provides a declarative schema language, migrations system, and object-oriented query language, in addition to supporting raw SQL queries. It solves the object-relational mapping problem at the database layer, eliminating the need for an ORM library in your application code.
|
||||
EdgeDB is a graph-relational database powered by Postgres under the hood. It provides a declarative schema language, migrations system, and object-oriented query language, in addition to supporting raw SQL queries. It solves the object-relational mapping problem at the database layer, eliminating the need for an ORM library in your application code.
|
||||
|
||||
---
|
||||
|
||||
First, [install Gel](https://docs.geldata.com/learn/installation) if you haven't already.
|
||||
First, [install EdgeDB](https://www.edgedb.com/install) if you haven't already.
|
||||
|
||||
<CodeGroup>
|
||||
|
||||
```sh Linux/macOS terminal icon="terminal"
|
||||
curl https://www.geldata.com/sh --proto "=https" -sSf1 | sh
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.edgedb.com | sh
|
||||
```
|
||||
|
||||
```sh Windows terminal icon="windows"
|
||||
irm https://www.geldata.com/ps1 | iex
|
||||
```
|
||||
|
||||
```sh Homebrew terminal icon="terminal"
|
||||
brew install geldata/tap/gel-cli
|
||||
iwr https://ps1.edgedb.com -useb | iex
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
@@ -38,35 +34,35 @@ bun init -y
|
||||
|
||||
---
|
||||
|
||||
We'll use the Gel CLI to initialize a Gel instance for our project. This creates a `gel.toml` file in our project root.
|
||||
We'll use the EdgeDB CLI to initialize an EdgeDB instance for our project. This creates an `edgedb.toml` file in our project root.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
gel project init
|
||||
edgedb project init
|
||||
```
|
||||
|
||||
```txt
|
||||
No `gel.toml` found in `/Users/colinmcd94/Documents/bun/fun/examples/my-gel-app` or above
|
||||
No `edgedb.toml` found in `/Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app` or above
|
||||
Do you want to initialize a new project? [Y/n]
|
||||
> Y
|
||||
Specify the name of Gel instance to use with this project [default: my_gel_app]:
|
||||
> my_gel_app
|
||||
Checking Gel versions...
|
||||
Specify the version of Gel to use with this project [default: x.y]:
|
||||
Specify the name of EdgeDB instance to use with this project [default: my_edgedb_app]:
|
||||
> my_edgedb_app
|
||||
Checking EdgeDB versions...
|
||||
Specify the version of EdgeDB to use with this project [default: x.y]:
|
||||
> x.y
|
||||
┌─────────────────────┬──────────────────────────────────────────────────────────────────┐
|
||||
│ Project directory │ /Users/colinmcd94/Documents/bun/fun/examples/my-gel-app │
|
||||
│ Project config │ /Users/colinmcd94/Documents/bun/fun/examples/my-gel-app/gel.toml│
|
||||
│ Schema dir (empty) │ /Users/colinmcd94/Documents/bun/fun/examples/my-gel-app/dbschema│
|
||||
│ Installation method │ portable package │
|
||||
│ Version │ x.y+6d5921b │
|
||||
│ Instance name │ my_gel_app │
|
||||
└─────────────────────┴──────────────────────────────────────────────────────────────────┘
|
||||
┌─────────────────────┬────────────────────────────────────────────────────────────────────────┐
|
||||
│ Project directory │ /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app │
|
||||
│ Project config │ /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app/edgedb.toml │
|
||||
│ Schema dir (empty) │ /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app/dbschema │
|
||||
│ Installation method │ portable package │
|
||||
│ Version │ x.y+6d5921b │
|
||||
│ Instance name │ my_edgedb_app │
|
||||
└─────────────────────┴────────────────────────────────────────────────────────────────────────┘
|
||||
Version x.y+6d5921b is already downloaded
|
||||
Initializing Gel instance...
|
||||
Initializing EdgeDB instance...
|
||||
Applying migrations...
|
||||
Everything is up to date. Revision initial
|
||||
Project initialized.
|
||||
To connect to my_gel_app, run `gel`
|
||||
To connect to my_edgedb_app, run `edgedb`
|
||||
```
|
||||
|
||||
---
|
||||
@@ -74,8 +70,8 @@ To connect to my_gel_app, run `gel`
|
||||
To see if the database is running, let's open a REPL and run a simple query.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
gel
|
||||
gel> select 1 + 1;
|
||||
edgedb
|
||||
edgedb> select 1 + 1;
|
||||
```
|
||||
|
||||
```txt
|
||||
@@ -85,12 +81,12 @@ gel> select 1 + 1;
|
||||
Then run `\quit` to exit the REPL.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
gel> \quit
|
||||
edgedb> \quit
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
With the project initialized, we can define a schema. The `gel project init` command already created a `dbschema/default.esdl` file to contain our schema.
|
||||
With the project initialized, we can define a schema. The `edgedb project init` command already created a `dbschema/default.esdl` file to contain our schema.
|
||||
|
||||
```txt File Tree icon="folder-tree"
|
||||
dbschema
|
||||
@@ -116,15 +112,15 @@ module default {
|
||||
Then generate and apply an initial migration.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
gel migration create
|
||||
edgedb migration create
|
||||
```
|
||||
|
||||
```txt
|
||||
Created /Users/colinmcd94/Documents/bun/fun/examples/my-gel-app/dbschema/migrations/00001.edgeql, id: m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq
|
||||
Created /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app/dbschema/migrations/00001.edgeql, id: m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq
|
||||
```
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
gel migrate
|
||||
edgedb migrate
|
||||
```
|
||||
|
||||
```txt
|
||||
@@ -133,11 +129,11 @@ Applied m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq (00001.edgeql)
|
||||
|
||||
---
|
||||
|
||||
With our schema applied, let's execute some queries using Gel's JavaScript client library. We'll install the client library and Gel's codegen CLI, and create a `seed.ts` file.
|
||||
With our schema applied, let's execute some queries using EdgeDB's JavaScript client library. We'll install the client library and EdgeDB's codegen CLI, and create a `seed.ts` file.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun add gel
|
||||
bun add -D @gel/generate
|
||||
bun add edgedb
|
||||
bun add -D @edgedb/generate
|
||||
touch seed.ts
|
||||
```
|
||||
|
||||
@@ -148,7 +144,7 @@ Paste the following code into `seed.ts`.
|
||||
The client auto-connects to the database. We insert a couple movies using the `.execute()` method. We will use EdgeQL's `for` expression to turn this bulk insert into a single optimized query.
|
||||
|
||||
```ts seed.ts icon="/icons/typescript.svg"
|
||||
import { createClient } from "gel";
|
||||
import { createClient } from "edgedb";
|
||||
|
||||
const client = createClient();
|
||||
|
||||
@@ -188,10 +184,10 @@ Seeding complete.
|
||||
|
||||
---
|
||||
|
||||
Gel implements a number of code generation tools for TypeScript. To query our newly seeded database in a typesafe way, we'll use `@gel/generate` to code-generate the EdgeQL query builder.
|
||||
EdgeDB implements a number of code generation tools for TypeScript. To query our newly seeded database in a typesafe way, we'll use `@edgedb/generate` to code-generate the EdgeQL query builder.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bunx @gel/generate edgeql-js
|
||||
bunx @edgedb/generate edgeql-js
|
||||
```
|
||||
|
||||
```txt
|
||||
@@ -217,7 +213,7 @@ the query builder directory? The following line will be added:
|
||||
In `index.ts`, we can import the generated query builder from `./dbschema/edgeql-js` and write a simple select query.
|
||||
|
||||
```ts index.ts icon="/icons/typescript.svg"
|
||||
import { createClient } from "gel";
|
||||
import { createClient } from "edgedb";
|
||||
import e from "./dbschema/edgeql-js";
|
||||
|
||||
const client = createClient();
|
||||
@@ -258,4 +254,4 @@ bun run index.ts
|
||||
|
||||
---
|
||||
|
||||
For complete documentation, refer to the [Gel docs](https://docs.geldata.com/).
|
||||
For complete documentation, refer to the [EdgeDB docs](https://www.edgedb.com/docs).
|
||||
@@ -7,8 +7,8 @@ mode: center
|
||||
Express and other major Node.js HTTP libraries should work out of the box. Bun implements the [`node:http`](https://nodejs.org/api/http.html) and [`node:https`](https://nodejs.org/api/https.html) modules that these libraries rely on.
|
||||
|
||||
<Note>
|
||||
Refer to the [Runtime > Node.js APIs](/runtime/nodejs-compat#node-http) page for more detailed compatibility
|
||||
information.
|
||||
Refer to the [Runtime > Node.js APIs](https://bun.com/docs/runtime/nodejs-apis#node-http) page for more detailed
|
||||
compatibility information.
|
||||
</Note>
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
|
||||
@@ -89,4 +89,4 @@ Moo!
|
||||
|
||||
---
|
||||
|
||||
This is a simple introduction to using Mongoose with TypeScript and Bun. As you build your application, refer to the official [MongoDB](https://www.mongodb.com/docs) and [Mongoose](https://mongoosejs.com/docs/) sites for complete documentation.
|
||||
This is a simple introduction to using Mongoose with TypeScript and Bun. As you build your application, refer to the official [MongoDB](https://docs.mongodb.com/) and [Mongoose](https://mongoosejs.com/docs/) sites for complete documentation.
|
||||
|
||||
@@ -20,7 +20,7 @@ bun add -D drizzle-kit
|
||||
|
||||
Create a `.env.local` file and add your [Neon Postgres connection string](https://neon.tech/docs/connect/connect-from-any-app) to it.
|
||||
|
||||
```ini .env.local icon="settings"
|
||||
```txt .env.local icon="settings"
|
||||
DATABASE_URL=postgresql://usertitle:password@ep-adj-noun-guid.us-east-1.aws.neon.tech/neondb?sslmode=require
|
||||
```
|
||||
|
||||
@@ -33,7 +33,7 @@ import { neon } from "@neondatabase/serverless";
|
||||
import { drizzle } from "drizzle-orm/neon-http";
|
||||
|
||||
// Bun automatically loads the DATABASE_URL from .env.local
|
||||
// Refer to: https://bun.com/docs/runtime/environment-variables for more information
|
||||
// Refer to: https://bun.com/docs/runtime/env for more information
|
||||
const sql = neon(process.env.DATABASE_URL!);
|
||||
|
||||
export const db = drizzle(sql);
|
||||
|
||||
@@ -21,7 +21,7 @@ bun add @neondatabase/serverless
|
||||
|
||||
Create a `.env.local` file and add your [Neon Postgres connection string](https://neon.tech/docs/connect/connect-from-any-app) to it.
|
||||
|
||||
```ini .env.local icon="settings"
|
||||
```sh .env.local icon="settings"
|
||||
DATABASE_URL=postgresql://usertitle:password@ep-adj-noun-guid.us-east-1.aws.neon.tech/neondb?sslmode=require
|
||||
```
|
||||
|
||||
@@ -33,7 +33,7 @@ Paste the following code into your project's `index.ts` file.
|
||||
import { neon } from "@neondatabase/serverless";
|
||||
|
||||
// Bun automatically loads the DATABASE_URL from .env.local
|
||||
// Refer to: https://bun.com/docs/runtime/environment-variables for more information
|
||||
// Refer to: https://bun.com/docs/runtime/env for more information
|
||||
const sql = neon(process.env.DATABASE_URL);
|
||||
|
||||
const rows = await sql`SELECT version()`;
|
||||
|
||||
@@ -4,100 +4,54 @@ sidebarTitle: Next.js with Bun
|
||||
mode: center
|
||||
---
|
||||
|
||||
[Next.js](https://nextjs.org/) is a React framework for building full-stack web applications. It supports server-side rendering, static site generation, API routes, and more. Bun provides fast package installation and can run Next.js development and production servers.
|
||||
Initialize a Next.js app with `create-next-app`. This will scaffold a new Next.js project and automatically install dependencies.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun create next-app
|
||||
```
|
||||
|
||||
```txt
|
||||
✔ What is your project named? … my-app
|
||||
✔ Would you like to use TypeScript with this project? … No / Yes
|
||||
✔ Would you like to use ESLint with this project? … No / Yes
|
||||
✔ Would you like to use Tailwind CSS? ... No / Yes
|
||||
✔ Would you like to use `src/` directory with this project? … No / Yes
|
||||
✔ Would you like to use App Router? (recommended) ... No / Yes
|
||||
✔ What import alias would you like configured? … @/*
|
||||
Creating a new Next.js app in /path/to/my-app.
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a new Next.js app">
|
||||
Use the interactive CLI to create a new Next.js app. This will scaffold a new Next.js project and automatically install dependencies.
|
||||
You can specify a starter template using the `--example` flag.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun create next-app@latest my-bun-app
|
||||
```
|
||||
```sh
|
||||
bun create next-app --example with-supabase
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Start the dev server">
|
||||
Change to the project directory and run the dev server with Bun.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
cd my-bun-app
|
||||
bun --bun run dev
|
||||
```
|
||||
|
||||
This starts the Next.js dev server with Bun's runtime.
|
||||
|
||||
Open [`http://localhost:3000`](http://localhost:3000) with your browser to see the result. Any changes you make to `app/page.tsx` will be hot-reloaded in the browser.
|
||||
|
||||
</Step>
|
||||
<Step title="Update scripts in package.json">
|
||||
Modify the scripts field in your `package.json` by prefixing the Next.js CLI commands with `bun --bun`. This ensures that Bun executes the Next.js CLI for common tasks like `dev`, `build`, and `start`.
|
||||
|
||||
```json package.json icon="file-json"
|
||||
{
|
||||
"scripts": {
|
||||
"dev": "bun --bun next dev", // [!code ++]
|
||||
"build": "bun --bun next build", // [!code ++]
|
||||
"start": "bun --bun next start", // [!code ++]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
```txt
|
||||
✔ What is your project named? … my-app
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Hosting
|
||||
To start the dev server with Bun, run `bun --bun run dev` from the project root.
|
||||
|
||||
Next.js applications on Bun can be deployed to various platforms.
|
||||
|
||||
<Columns cols={3}>
|
||||
<Card title="Vercel" href="/guides/deployment/vercel" icon="/icons/ecosystem/vercel.svg">
|
||||
Deploy on Vercel
|
||||
</Card>
|
||||
<Card title="Railway" href="/guides/deployment/railway" icon="/icons/ecosystem/railway.svg">
|
||||
Deploy on Railway
|
||||
</Card>
|
||||
<Card title="DigitalOcean" href="/guides/deployment/digital-ocean" icon="/icons/ecosystem/digitalocean.svg">
|
||||
Deploy on DigitalOcean
|
||||
</Card>
|
||||
<Card title="AWS Lambda" href="/guides/deployment/aws-lambda" icon="/icons/ecosystem/aws.svg">
|
||||
Deploy on AWS Lambda
|
||||
</Card>
|
||||
<Card title="Google Cloud Run" href="/guides/deployment/google-cloud-run" icon="/icons/ecosystem/gcp.svg">
|
||||
Deploy on Google Cloud Run
|
||||
</Card>
|
||||
<Card title="Render" href="/guides/deployment/render" icon="/icons/ecosystem/render.svg">
|
||||
Deploy on Render
|
||||
</Card>
|
||||
</Columns>
|
||||
```sh terminal icon="terminal"
|
||||
cd my-app
|
||||
bun --bun run dev
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Templates
|
||||
To run the dev server with Node.js instead, omit `--bun`.
|
||||
|
||||
<Columns cols={2}>
|
||||
<Card
|
||||
title="Bun + Next.js Basic Starter"
|
||||
img="/images/templates/bun-nextjs-basic.png"
|
||||
href="https://github.com/bun-templates/bun-nextjs-basic"
|
||||
arrow="true"
|
||||
cta="Go to template"
|
||||
>
|
||||
A simple App Router starter with Bun, Next.js, and Tailwind CSS.
|
||||
</Card>
|
||||
<Card
|
||||
title="Todo App with Next.js + Bun"
|
||||
img="/images/templates/bun-nextjs-todo.png"
|
||||
href="https://github.com/bun-templates/bun-nextjs-todo"
|
||||
arrow="true"
|
||||
cta="Go to template"
|
||||
>
|
||||
A full-stack todo application built with Bun, Next.js, and PostgreSQL.
|
||||
</Card>
|
||||
</Columns>
|
||||
```sh terminal icon="terminal"
|
||||
cd my-app
|
||||
bun run dev
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
[→ See Next.js's official documentation](https://nextjs.org/docs) for more information on building and deploying Next.js applications.
|
||||
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. Any changes you make to `(pages/app)/index.tsx` will be hot-reloaded in the browser.
|
||||
|
||||
@@ -14,7 +14,7 @@ bunx nuxi init my-nuxt-app
|
||||
✔ Which package manager would you like to use?
|
||||
bun
|
||||
◐ Installing dependencies...
|
||||
bun install v1.3.3 (16b4bf34)
|
||||
bun install v1.3.1 (16b4bf34)
|
||||
+ @nuxt/devtools@0.8.2
|
||||
+ nuxt@3.7.0
|
||||
785 packages installed [2.67s]
|
||||
|
||||
@@ -62,7 +62,7 @@ mode: center
|
||||
<Step title="Configure database connection">
|
||||
Set up your Postgres database URL in the `.env` file.
|
||||
|
||||
```ini .env icon="settings"
|
||||
```env .env icon="settings"
|
||||
DATABASE_URL="postgresql://username:password@localhost:5432/mydb?schema=public"
|
||||
```
|
||||
</Step>
|
||||
|
||||
@@ -161,4 +161,4 @@ mode: center
|
||||
|
||||
---
|
||||
|
||||
That's it! Now that you've set up Prisma using Bun, we recommend referring to the [official Prisma docs](https://www.prisma.io/docs/orm/prisma-client) as you continue to develop your application.
|
||||
That's it! Now that you've set up Prisma using Bun, we recommend referring to the [official Prisma docs](https://www.prisma.io/docs/concepts/components/prisma-client) as you continue to develop your application.
|
||||
|
||||
@@ -65,14 +65,14 @@ bun create qwik
|
||||
│ bun qwik add │
|
||||
│ │
|
||||
│ Relevant docs: │
|
||||
│ https://qwik.dev/docs/getting-started/ │
|
||||
│ https://qwik.builder.io/docs/getting-started/ │
|
||||
│ │
|
||||
│ Questions? Start the conversation at: │
|
||||
│ https://qwik.dev/chat │
|
||||
│ https://qwik.builder.io/chat │
|
||||
│ https://twitter.com/QwikDev │
|
||||
│ │
|
||||
│ Presentations, Podcasts and Videos: │
|
||||
│ https://qwik.dev/media/ │
|
||||
│ https://qwik.builder.io/media/ │
|
||||
│ │
|
||||
│ Next steps: │
|
||||
│ cd my-app │
|
||||
@@ -111,4 +111,4 @@ Open [http://localhost:5173](http://localhost:5173) with your browser to see the
|
||||
|
||||
---
|
||||
|
||||
Refer to the [Qwik docs](https://qwik.dev/docs/getting-started/) for complete documentation.
|
||||
Refer to the [Qwik docs](https://qwik.builder.io/docs/getting-started/) for complete documentation.
|
||||
|
||||
@@ -20,7 +20,7 @@ bun add @sentry/bun
|
||||
|
||||
Then, initialize the Sentry SDK with your Sentry DSN in your app's entry file. You can find your DSN in your Sentry project settings.
|
||||
|
||||
```ts sentry.ts icon="/icons/typescript.svg"
|
||||
```js sentry.ts icon="/icons/typescript.svg"
|
||||
import * as Sentry from "@sentry/bun";
|
||||
|
||||
// Ensure to call this before importing any other modules!
|
||||
@@ -37,7 +37,7 @@ Sentry.init({
|
||||
|
||||
You can verify that Sentry is working by capturing a test error:
|
||||
|
||||
```ts sentry.ts icon="/icons/typescript.svg"
|
||||
```js sentry.ts icon="/icons/typescript.svg"
|
||||
setTimeout(() => {
|
||||
try {
|
||||
foo();
|
||||
|
||||
@@ -88,7 +88,7 @@ To build for production, you'll need to add the right SvelteKit adapter. Current
|
||||
|
||||
Now, make the following changes to your `svelte.config.js`.
|
||||
|
||||
```js svelte.config.js icon="file-code"
|
||||
```ts svelte.config.js icon="file-code"
|
||||
import adapter from "@sveltejs/adapter-auto"; // [!code --]
|
||||
import adapter from "svelte-adapter-bun"; // [!code ++]
|
||||
import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";
|
||||
|
||||
@@ -1,791 +0,0 @@
|
||||
---
|
||||
title: Use TanStack Start with Bun
|
||||
sidebarTitle: TanStack Start with Bun
|
||||
mode: center
|
||||
---
|
||||
|
||||
[TanStack Start](https://tanstack.com/start/latest) is a full-stack framework powered by TanStack Router. It supports full-document SSR, streaming, server functions, bundling and more, powered by TanStack Router and [Vite](https://vite.dev/).
|
||||
|
||||
---
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a new TanStack Start app">
|
||||
Use the interactive CLI to create a new TanStack Start app.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun create @tanstack/start@latest my-tanstack-app
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Start the dev server">
|
||||
Change to the project directory and run the dev server with Bun.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
cd my-tanstack-app
|
||||
bun --bun run dev
|
||||
```
|
||||
|
||||
This starts the Vite dev server with Bun.
|
||||
|
||||
</Step>
|
||||
<Step title="Update scripts in package.json">
|
||||
Modify the scripts field in your `package.json` by prefixing the Vite CLI commands with `bun --bun`. This ensures that Bun executes the Vite CLI for common tasks like `dev`, `build`, and `preview`.
|
||||
|
||||
```json package.json icon="file-json"
|
||||
{
|
||||
"scripts": {
|
||||
"dev": "bun --bun vite dev", // [!code ++]
|
||||
"build": "bun --bun vite build", // [!code ++]
|
||||
"serve": "bun --bun vite preview" // [!code ++]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
---
|
||||
|
||||
## Hosting
|
||||
|
||||
To host your TanStack Start app, you can use [Nitro](https://nitro.build/) or a custom Bun server for production deployments.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Nitro">
|
||||
<Steps>
|
||||
<Step title="Add Nitro to your project">
|
||||
Add [Nitro](https://nitro.build/) to your project. This tool allows you to deploy your TanStack Start app to different platforms.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun add nitro
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title={<span>Update your <code>vite.config.ts</code> file</span>}>
|
||||
Update your `vite.config.ts` file to include the necessary plugins for TanStack Start with Bun.
|
||||
|
||||
```ts vite.config.ts icon="/icons/typescript.svg"
|
||||
// other imports...
|
||||
import { nitro } from "nitro/vite"; // [!code ++]
|
||||
|
||||
const config = defineConfig({
|
||||
plugins: [
|
||||
tanstackStart(),
|
||||
nitro({ preset: "bun" }), // [!code ++]
|
||||
// other plugins...
|
||||
],
|
||||
});
|
||||
|
||||
export default config;
|
||||
```
|
||||
|
||||
<Note>
|
||||
The `bun` preset is optional, but it configures the build output specifically for Bun's runtime.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Update the start command">
|
||||
Make sure `build` and `start` scripts are present in your `package.json` file:
|
||||
|
||||
```json package.json icon="file-json"
|
||||
{
|
||||
"scripts": {
|
||||
"build": "bun --bun vite build", // [!code ++]
|
||||
// The .output files are created by Nitro when you run `bun run build`.
|
||||
// Not necessary when deploying to Vercel.
|
||||
"start": "bun run .output/server/index.mjs" // [!code ++]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<Note>
|
||||
You do **not** need the custom `start` script when deploying to Vercel.
|
||||
</Note>
|
||||
|
||||
</Step>
|
||||
<Step title="Deploy your app">
|
||||
Check out one of our guides to deploy your app to a hosting provider.
|
||||
|
||||
<Note>
|
||||
When deploying to Vercel, you can either add the `"bunVersion": "1.x"` to your `vercel.json` file, or add it to the `nitro` config in your `vite.config.ts` file:
|
||||
|
||||
<Warning>
|
||||
Do **not** use the `bun` Nitro preset when deploying to Vercel.
|
||||
</Warning>
|
||||
|
||||
```ts vite.config.ts icon="/icons/typescript.svg"
|
||||
export default defineConfig({
|
||||
plugins: [
|
||||
tanstackStart(),
|
||||
nitro({
|
||||
preset: "bun", // [!code --]
|
||||
vercel: { // [!code ++]
|
||||
functions: { // [!code ++]
|
||||
runtime: "bun1.x", // [!code ++]
|
||||
}, // [!code ++]
|
||||
}, // [!code ++]
|
||||
}),
|
||||
],
|
||||
});
|
||||
```
|
||||
</Note>
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Tab>
|
||||
<Tab title="Custom Server">
|
||||
<Note>
|
||||
This custom server implementation is based on [TanStack's Bun template](https://github.com/TanStack/router/blob/main/examples/react/start-bun/server.ts). It provides fine-grained control over static asset serving, including configurable memory management that preloads small files into memory for fast serving while serving larger files on-demand. This approach is useful when you need precise control over resource usage and asset loading behavior in production deployments.
|
||||
</Note>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create the production server">
|
||||
Create a `server.ts` file in your project root with the following custom server implementation:
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg" expandable
|
||||
/**
|
||||
* TanStack Start Production Server with Bun
|
||||
*
|
||||
* A high-performance production server for TanStack Start applications that
|
||||
* implements intelligent static asset loading with configurable memory management.
|
||||
*
|
||||
* Features:
|
||||
* - Hybrid loading strategy (preload small files, serve large files on-demand)
|
||||
* - Configurable file filtering with include/exclude patterns
|
||||
* - Memory-efficient response generation
|
||||
* - Production-ready caching headers
|
||||
*
|
||||
* Environment Variables:
|
||||
*
|
||||
* PORT (number)
|
||||
* - Server port number
|
||||
* - Default: 3000
|
||||
*
|
||||
* ASSET_PRELOAD_MAX_SIZE (number)
|
||||
* - Maximum file size in bytes to preload into memory
|
||||
* - Files larger than this will be served on-demand from disk
|
||||
* - Default: 5242880 (5MB)
|
||||
* - Example: ASSET_PRELOAD_MAX_SIZE=5242880 (5MB)
|
||||
*
|
||||
* ASSET_PRELOAD_INCLUDE_PATTERNS (string)
|
||||
* - Comma-separated list of glob patterns for files to include
|
||||
* - If specified, only matching files are eligible for preloading
|
||||
* - Patterns are matched against filenames only, not full paths
|
||||
* - Example: ASSET_PRELOAD_INCLUDE_PATTERNS="*.js,*.css,*.woff2"
|
||||
*
|
||||
* ASSET_PRELOAD_EXCLUDE_PATTERNS (string)
|
||||
* - Comma-separated list of glob patterns for files to exclude
|
||||
* - Applied after include patterns
|
||||
* - Patterns are matched against filenames only, not full paths
|
||||
* - Example: ASSET_PRELOAD_EXCLUDE_PATTERNS="*.map,*.txt"
|
||||
*
|
||||
* ASSET_PRELOAD_VERBOSE_LOGGING (boolean)
|
||||
* - Enable detailed logging of loaded and skipped files
|
||||
* - Default: false
|
||||
* - Set to "true" to enable verbose output
|
||||
*
|
||||
* ASSET_PRELOAD_ENABLE_ETAG (boolean)
|
||||
* - Enable ETag generation for preloaded assets
|
||||
* - Default: true
|
||||
* - Set to "false" to disable ETag support
|
||||
*
|
||||
* ASSET_PRELOAD_ENABLE_GZIP (boolean)
|
||||
* - Enable Gzip compression for eligible assets
|
||||
* - Default: true
|
||||
* - Set to "false" to disable Gzip compression
|
||||
*
|
||||
* ASSET_PRELOAD_GZIP_MIN_SIZE (number)
|
||||
* - Minimum file size in bytes required for Gzip compression
|
||||
* - Files smaller than this will not be compressed
|
||||
* - Default: 1024 (1KB)
|
||||
*
|
||||
* ASSET_PRELOAD_GZIP_MIME_TYPES (string)
|
||||
* - Comma-separated list of MIME types eligible for Gzip compression
|
||||
* - Supports partial matching for types ending with "/"
|
||||
* - Default: text/,application/javascript,application/json,application/xml,image/svg+xml
|
||||
*
|
||||
* Usage:
|
||||
* bun run server.ts
|
||||
*/
|
||||
|
||||
import path from 'node:path'
|
||||
|
||||
// Configuration
|
||||
const SERVER_PORT = Number(process.env.PORT ?? 3000)
|
||||
const CLIENT_DIRECTORY = './dist/client'
|
||||
const SERVER_ENTRY_POINT = './dist/server/server.js'
|
||||
|
||||
// Logging utilities for professional output
|
||||
const log = {
|
||||
info: (message: string) => {
|
||||
console.log(`[INFO] ${message}`)
|
||||
},
|
||||
success: (message: string) => {
|
||||
console.log(`[SUCCESS] ${message}`)
|
||||
},
|
||||
warning: (message: string) => {
|
||||
console.log(`[WARNING] ${message}`)
|
||||
},
|
||||
error: (message: string) => {
|
||||
console.log(`[ERROR] ${message}`)
|
||||
},
|
||||
header: (message: string) => {
|
||||
console.log(`\n${message}\n`)
|
||||
},
|
||||
}
|
||||
|
||||
// Preloading configuration from environment variables
|
||||
const MAX_PRELOAD_BYTES = Number(
|
||||
process.env.ASSET_PRELOAD_MAX_SIZE ?? 5 * 1024 * 1024, // 5MB default
|
||||
)
|
||||
|
||||
// Parse comma-separated include patterns (no defaults)
|
||||
const INCLUDE_PATTERNS = (process.env.ASSET_PRELOAD_INCLUDE_PATTERNS ?? '')
|
||||
.split(',')
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean)
|
||||
.map((pattern: string) => convertGlobToRegExp(pattern))
|
||||
|
||||
// Parse comma-separated exclude patterns (no defaults)
|
||||
const EXCLUDE_PATTERNS = (process.env.ASSET_PRELOAD_EXCLUDE_PATTERNS ?? '')
|
||||
.split(',')
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean)
|
||||
.map((pattern: string) => convertGlobToRegExp(pattern))
|
||||
|
||||
// Verbose logging flag
|
||||
const VERBOSE = process.env.ASSET_PRELOAD_VERBOSE_LOGGING === 'true'
|
||||
|
||||
// Optional ETag feature
|
||||
const ENABLE_ETAG = (process.env.ASSET_PRELOAD_ENABLE_ETAG ?? 'true') === 'true'
|
||||
|
||||
// Optional Gzip feature
|
||||
const ENABLE_GZIP = (process.env.ASSET_PRELOAD_ENABLE_GZIP ?? 'true') === 'true'
|
||||
const GZIP_MIN_BYTES = Number(process.env.ASSET_PRELOAD_GZIP_MIN_SIZE ?? 1024) // 1KB
|
||||
const GZIP_TYPES = (
|
||||
process.env.ASSET_PRELOAD_GZIP_MIME_TYPES ??
|
||||
'text/,application/javascript,application/json,application/xml,image/svg+xml'
|
||||
)
|
||||
.split(',')
|
||||
.map((v) => v.trim())
|
||||
.filter(Boolean)
|
||||
|
||||
/**
|
||||
* Convert a simple glob pattern to a regular expression
|
||||
* Supports * wildcard for matching any characters
|
||||
*/
|
||||
function convertGlobToRegExp(globPattern: string): RegExp {
|
||||
// Escape regex special chars except *, then replace * with .*
|
||||
const escapedPattern = globPattern
|
||||
.replace(/[-/\\^$+?.()|[\]{}]/g, '\\$&')
|
||||
.replace(/\*/g, '.*')
|
||||
return new RegExp(`^${escapedPattern}$`, 'i')
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute ETag for a given data buffer
|
||||
*/
|
||||
function computeEtag(data: Uint8Array): string {
|
||||
const hash = Bun.hash(data)
|
||||
return `W/"${hash.toString(16)}-${data.byteLength.toString()}"`
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata for preloaded static assets
|
||||
*/
|
||||
interface AssetMetadata {
|
||||
route: string
|
||||
size: number
|
||||
type: string
|
||||
}
|
||||
|
||||
/**
|
||||
* In-memory asset with ETag and Gzip support
|
||||
*/
|
||||
interface InMemoryAsset {
|
||||
raw: Uint8Array
|
||||
gz?: Uint8Array
|
||||
etag?: string
|
||||
type: string
|
||||
immutable: boolean
|
||||
size: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of static asset preloading process
|
||||
*/
|
||||
interface PreloadResult {
|
||||
routes: Record<string, (req: Request) => Response | Promise<Response>>
|
||||
loaded: AssetMetadata[]
|
||||
skipped: AssetMetadata[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file is eligible for preloading based on configured patterns
|
||||
*/
|
||||
function isFileEligibleForPreloading(relativePath: string): boolean {
|
||||
const fileName = relativePath.split(/[/\\]/).pop() ?? relativePath
|
||||
|
||||
// If include patterns are specified, file must match at least one
|
||||
if (INCLUDE_PATTERNS.length > 0) {
|
||||
if (!INCLUDE_PATTERNS.some((pattern) => pattern.test(fileName))) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// If exclude patterns are specified, file must not match any
|
||||
if (EXCLUDE_PATTERNS.some((pattern) => pattern.test(fileName))) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a MIME type is compressible
|
||||
*/
|
||||
function isMimeTypeCompressible(mimeType: string): boolean {
|
||||
return GZIP_TYPES.some((type) =>
|
||||
type.endsWith('/') ? mimeType.startsWith(type) : mimeType === type,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Conditionally compress data based on size and MIME type
|
||||
*/
|
||||
function compressDataIfAppropriate(
|
||||
data: Uint8Array,
|
||||
mimeType: string,
|
||||
): Uint8Array | undefined {
|
||||
if (!ENABLE_GZIP) return undefined
|
||||
if (data.byteLength < GZIP_MIN_BYTES) return undefined
|
||||
if (!isMimeTypeCompressible(mimeType)) return undefined
|
||||
try {
|
||||
return Bun.gzipSync(data.buffer as ArrayBuffer)
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create response handler function with ETag and Gzip support
|
||||
*/
|
||||
function createResponseHandler(
|
||||
asset: InMemoryAsset,
|
||||
): (req: Request) => Response {
|
||||
return (req: Request) => {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': asset.type,
|
||||
'Cache-Control': asset.immutable
|
||||
? 'public, max-age=31536000, immutable'
|
||||
: 'public, max-age=3600',
|
||||
}
|
||||
|
||||
if (ENABLE_ETAG && asset.etag) {
|
||||
const ifNone = req.headers.get('if-none-match')
|
||||
if (ifNone && ifNone === asset.etag) {
|
||||
return new Response(null, {
|
||||
status: 304,
|
||||
headers: { ETag: asset.etag },
|
||||
})
|
||||
}
|
||||
headers.ETag = asset.etag
|
||||
}
|
||||
|
||||
if (
|
||||
ENABLE_GZIP &&
|
||||
asset.gz &&
|
||||
req.headers.get('accept-encoding')?.includes('gzip')
|
||||
) {
|
||||
headers['Content-Encoding'] = 'gzip'
|
||||
headers['Content-Length'] = String(asset.gz.byteLength)
|
||||
const gzCopy = new Uint8Array(asset.gz)
|
||||
return new Response(gzCopy, { status: 200, headers })
|
||||
}
|
||||
|
||||
headers['Content-Length'] = String(asset.raw.byteLength)
|
||||
const rawCopy = new Uint8Array(asset.raw)
|
||||
return new Response(rawCopy, { status: 200, headers })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create composite glob pattern from include patterns
|
||||
*/
|
||||
function createCompositeGlobPattern(): Bun.Glob {
|
||||
const raw = (process.env.ASSET_PRELOAD_INCLUDE_PATTERNS ?? '')
|
||||
.split(',')
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean)
|
||||
if (raw.length === 0) return new Bun.Glob('**/*')
|
||||
if (raw.length === 1) return new Bun.Glob(raw[0])
|
||||
return new Bun.Glob(`{${raw.join(',')}}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize static routes with intelligent preloading strategy
|
||||
* Small files are loaded into memory, large files are served on-demand
|
||||
*/
|
||||
async function initializeStaticRoutes(
|
||||
clientDirectory: string,
|
||||
): Promise<PreloadResult> {
|
||||
const routes: Record<string, (req: Request) => Response | Promise<Response>> =
|
||||
{}
|
||||
const loaded: AssetMetadata[] = []
|
||||
const skipped: AssetMetadata[] = []
|
||||
|
||||
log.info(`Loading static assets from ${clientDirectory}...`)
|
||||
if (VERBOSE) {
|
||||
console.log(
|
||||
`Max preload size: ${(MAX_PRELOAD_BYTES / 1024 / 1024).toFixed(2)} MB`,
|
||||
)
|
||||
if (INCLUDE_PATTERNS.length > 0) {
|
||||
console.log(
|
||||
`Include patterns: ${process.env.ASSET_PRELOAD_INCLUDE_PATTERNS ?? ''}`,
|
||||
)
|
||||
}
|
||||
if (EXCLUDE_PATTERNS.length > 0) {
|
||||
console.log(
|
||||
`Exclude patterns: ${process.env.ASSET_PRELOAD_EXCLUDE_PATTERNS ?? ''}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
let totalPreloadedBytes = 0
|
||||
|
||||
try {
|
||||
const glob = createCompositeGlobPattern()
|
||||
for await (const relativePath of glob.scan({ cwd: clientDirectory })) {
|
||||
const filepath = path.join(clientDirectory, relativePath)
|
||||
const route = `/${relativePath.split(path.sep).join(path.posix.sep)}`
|
||||
|
||||
try {
|
||||
// Get file metadata
|
||||
const file = Bun.file(filepath)
|
||||
|
||||
// Skip if file doesn't exist or is empty
|
||||
if (!(await file.exists()) || file.size === 0) {
|
||||
continue
|
||||
}
|
||||
|
||||
const metadata: AssetMetadata = {
|
||||
route,
|
||||
size: file.size,
|
||||
type: file.type || 'application/octet-stream',
|
||||
}
|
||||
|
||||
// Determine if file should be preloaded
|
||||
const matchesPattern = isFileEligibleForPreloading(relativePath)
|
||||
const withinSizeLimit = file.size <= MAX_PRELOAD_BYTES
|
||||
|
||||
if (matchesPattern && withinSizeLimit) {
|
||||
// Preload small files into memory with ETag and Gzip support
|
||||
const bytes = new Uint8Array(await file.arrayBuffer())
|
||||
const gz = compressDataIfAppropriate(bytes, metadata.type)
|
||||
const etag = ENABLE_ETAG ? computeEtag(bytes) : undefined
|
||||
const asset: InMemoryAsset = {
|
||||
raw: bytes,
|
||||
gz,
|
||||
etag,
|
||||
type: metadata.type,
|
||||
immutable: true,
|
||||
size: bytes.byteLength,
|
||||
}
|
||||
routes[route] = createResponseHandler(asset)
|
||||
|
||||
loaded.push({ ...metadata, size: bytes.byteLength })
|
||||
totalPreloadedBytes += bytes.byteLength
|
||||
} else {
|
||||
// Serve large or filtered files on-demand
|
||||
routes[route] = () => {
|
||||
const fileOnDemand = Bun.file(filepath)
|
||||
return new Response(fileOnDemand, {
|
||||
headers: {
|
||||
'Content-Type': metadata.type,
|
||||
'Cache-Control': 'public, max-age=3600',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
skipped.push(metadata)
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error && error.name !== 'EISDIR') {
|
||||
log.error(`Failed to load ${filepath}: ${error.message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Show detailed file overview only when verbose mode is enabled
|
||||
if (VERBOSE && (loaded.length > 0 || skipped.length > 0)) {
|
||||
const allFiles = [...loaded, ...skipped].sort((a, b) =>
|
||||
a.route.localeCompare(b.route),
|
||||
)
|
||||
|
||||
// Calculate max path length for alignment
|
||||
const maxPathLength = Math.min(
|
||||
Math.max(...allFiles.map((f) => f.route.length)),
|
||||
60,
|
||||
)
|
||||
|
||||
// Format file size with KB and actual gzip size
|
||||
const formatFileSize = (bytes: number, gzBytes?: number) => {
|
||||
const kb = bytes / 1024
|
||||
const sizeStr = kb < 100 ? kb.toFixed(2) : kb.toFixed(1)
|
||||
|
||||
if (gzBytes !== undefined) {
|
||||
const gzKb = gzBytes / 1024
|
||||
const gzStr = gzKb < 100 ? gzKb.toFixed(2) : gzKb.toFixed(1)
|
||||
return {
|
||||
size: sizeStr,
|
||||
gzip: gzStr,
|
||||
}
|
||||
}
|
||||
|
||||
// Rough gzip estimation (typically 30-70% compression) if no actual gzip data
|
||||
const gzipKb = kb * 0.35
|
||||
return {
|
||||
size: sizeStr,
|
||||
gzip: gzipKb < 100 ? gzipKb.toFixed(2) : gzipKb.toFixed(1),
|
||||
}
|
||||
}
|
||||
|
||||
if (loaded.length > 0) {
|
||||
console.log('\n📁 Preloaded into memory:')
|
||||
console.log(
|
||||
'Path │ Size │ Gzip Size',
|
||||
)
|
||||
loaded
|
||||
.sort((a, b) => a.route.localeCompare(b.route))
|
||||
.forEach((file) => {
|
||||
const { size, gzip } = formatFileSize(file.size)
|
||||
const paddedPath = file.route.padEnd(maxPathLength)
|
||||
const sizeStr = `${size.padStart(7)} kB`
|
||||
const gzipStr = `${gzip.padStart(7)} kB`
|
||||
console.log(`${paddedPath} │ ${sizeStr} │ ${gzipStr}`)
|
||||
})
|
||||
}
|
||||
|
||||
if (skipped.length > 0) {
|
||||
console.log('\n💾 Served on-demand:')
|
||||
console.log(
|
||||
'Path │ Size │ Gzip Size',
|
||||
)
|
||||
skipped
|
||||
.sort((a, b) => a.route.localeCompare(b.route))
|
||||
.forEach((file) => {
|
||||
const { size, gzip } = formatFileSize(file.size)
|
||||
const paddedPath = file.route.padEnd(maxPathLength)
|
||||
const sizeStr = `${size.padStart(7)} kB`
|
||||
const gzipStr = `${gzip.padStart(7)} kB`
|
||||
console.log(`${paddedPath} │ ${sizeStr} │ ${gzipStr}`)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Show detailed verbose info if enabled
|
||||
if (VERBOSE) {
|
||||
if (loaded.length > 0 || skipped.length > 0) {
|
||||
const allFiles = [...loaded, ...skipped].sort((a, b) =>
|
||||
a.route.localeCompare(b.route),
|
||||
)
|
||||
console.log('\n📊 Detailed file information:')
|
||||
console.log(
|
||||
'Status │ Path │ MIME Type │ Reason',
|
||||
)
|
||||
allFiles.forEach((file) => {
|
||||
const isPreloaded = loaded.includes(file)
|
||||
const status = isPreloaded ? 'MEMORY' : 'ON-DEMAND'
|
||||
const reason =
|
||||
!isPreloaded && file.size > MAX_PRELOAD_BYTES
|
||||
? 'too large'
|
||||
: !isPreloaded
|
||||
? 'filtered'
|
||||
: 'preloaded'
|
||||
const route =
|
||||
file.route.length > 30
|
||||
? file.route.substring(0, 27) + '...'
|
||||
: file.route
|
||||
console.log(
|
||||
`${status.padEnd(12)} │ ${route.padEnd(30)} │ ${file.type.padEnd(28)} │ ${reason.padEnd(10)}`,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
console.log('\n📊 No files found to display')
|
||||
}
|
||||
}
|
||||
|
||||
// Log summary after the file list
|
||||
console.log() // Empty line for separation
|
||||
if (loaded.length > 0) {
|
||||
log.success(
|
||||
`Preloaded ${String(loaded.length)} files (${(totalPreloadedBytes / 1024 / 1024).toFixed(2)} MB) into memory`,
|
||||
)
|
||||
} else {
|
||||
log.info('No files preloaded into memory')
|
||||
}
|
||||
|
||||
if (skipped.length > 0) {
|
||||
const tooLarge = skipped.filter((f) => f.size > MAX_PRELOAD_BYTES).length
|
||||
const filtered = skipped.length - tooLarge
|
||||
log.info(
|
||||
`${String(skipped.length)} files will be served on-demand (${String(tooLarge)} too large, ${String(filtered)} filtered)`,
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
log.error(
|
||||
`Failed to load static files from ${clientDirectory}: ${String(error)}`,
|
||||
)
|
||||
}
|
||||
|
||||
return { routes, loaded, skipped }
|
||||
}
|
||||
|
||||
/**
 * Initialize the server
 */
async function initializeServer() {
  log.header('Starting Production Server')

  // Load TanStack Start server handler
  let handler: { fetch: (request: Request) => Response | Promise<Response> }
  try {
    // Dynamic import so a broken build fails here with a clear log message
    const serverModule = (await import(SERVER_ENTRY_POINT)) as {
      default: { fetch: (request: Request) => Response | Promise<Response> }
    }
    handler = serverModule.default
    log.success('TanStack Start application handler initialized')
  } catch (error) {
    log.error(`Failed to load server handler: ${String(error)}`)
    // Nothing can be served without the app handler — abort startup
    process.exit(1)
  }

  // Build static routes with intelligent preloading
  const { routes } = await initializeStaticRoutes(CLIENT_DIRECTORY)

  // Create Bun server
  const server = Bun.serve({
    port: SERVER_PORT,

    routes: {
      // Serve static assets (preloaded or on-demand)
      ...routes,

      // Fallback to TanStack Start handler for all other routes
      '/*': (req: Request) => {
        try {
          return handler.fetch(req)
        } catch (error) {
          // NOTE(review): this only catches synchronous throws; a rejected
          // promise returned by handler.fetch is handled by the `error`
          // hook below instead
          log.error(`Server handler error: ${String(error)}`)
          return new Response('Internal Server Error', { status: 500 })
        }
      },
    },

    // Global error handler
    error(error) {
      log.error(
        `Uncaught server error: ${error instanceof Error ? error.message : String(error)}`,
      )
      return new Response('Internal Server Error', { status: 500 })
    },
  })

  log.success(`Server listening on http://localhost:${String(server.port)}`)
}
|
||||
|
||||
// Initialize the server
// Top-level entry point: any startup failure is fatal and exits non-zero
initializeServer().catch((error: unknown) => {
  log.error(`Failed to start server: ${String(error)}`)
  process.exit(1)
})
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Update package.json scripts">
|
||||
Add a `start` script to run the custom server:
|
||||
|
||||
```json package.json icon="file-json"
|
||||
{
|
||||
"scripts": {
|
||||
"build": "bun --bun vite build",
|
||||
"start": "bun run server.ts" // [!code ++]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Build and run">
|
||||
Build your application and start the server:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun run build
|
||||
bun run start
|
||||
```
|
||||
|
||||
The server will start on port 3000 by default (configurable via `PORT` environment variable).
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
<Columns cols={3}>
|
||||
<Card title="Vercel" href="/guides/deployment/vercel" icon="/icons/ecosystem/vercel.svg">
|
||||
Deploy on Vercel
|
||||
</Card>
|
||||
<Card title="Render" href="/guides/deployment/render" icon="/icons/ecosystem/render.svg">
|
||||
Deploy on Render
|
||||
</Card>
|
||||
<Card title="Railway" href="/guides/deployment/railway" icon="/icons/ecosystem/railway.svg">
|
||||
Deploy on Railway
|
||||
</Card>
|
||||
<Card title="DigitalOcean" href="/guides/deployment/digital-ocean" icon="/icons/ecosystem/digitalocean.svg">
|
||||
Deploy on DigitalOcean
|
||||
</Card>
|
||||
<Card title="AWS Lambda" href="/guides/deployment/aws-lambda" icon="/icons/ecosystem/aws.svg">
|
||||
Deploy on AWS Lambda
|
||||
</Card>
|
||||
<Card title="Google Cloud Run" href="/guides/deployment/google-cloud-run" icon="/icons/ecosystem/gcp.svg">
|
||||
Deploy on Google Cloud Run
|
||||
</Card>
|
||||
</Columns>
|
||||
|
||||
---
|
||||
|
||||
## Templates
|
||||
|
||||
<Columns cols={2}>
|
||||
<Card
|
||||
title="Todo App with Tanstack + Bun"
|
||||
img="/images/templates/bun-tanstack-todo.png"
|
||||
href="https://github.com/bun-templates/bun-tanstack-todo"
|
||||
arrow="true"
|
||||
cta="Go to template"
|
||||
>
|
||||
A Todo application built with Bun, TanStack Start, and PostgreSQL.
|
||||
</Card>
|
||||
<Card
|
||||
title="Bun + TanStack Start Application"
|
||||
img="/images/templates/bun-tanstack-basic.png"
|
||||
href="https://github.com/bun-templates/bun-tanstack-basic"
|
||||
arrow="true"
|
||||
cta="Go to template"
|
||||
>
|
||||
A TanStack Start template using Bun with SSR and file-based routing.
|
||||
</Card>
|
||||
<Card
|
||||
title="Basic Bun + Tanstack Starter"
|
||||
img="/images/templates/bun-tanstack-start.png"
|
||||
href="https://github.com/bun-templates/bun-tanstack-start"
|
||||
arrow="true"
|
||||
cta="Go to template"
|
||||
>
|
||||
The basic TanStack starter using the Bun runtime and Bun's file APIs.
|
||||
</Card>
|
||||
</Columns>
|
||||
|
||||
---
|
||||
|
||||
[→ See TanStack Start's official documentation](https://tanstack.com/start/latest/docs/framework/react/guide/hosting) for more information on hosting.
|
||||
@@ -1,87 +0,0 @@
|
||||
---
|
||||
title: Bun Redis with Upstash
|
||||
sidebarTitle: Upstash with Bun
|
||||
mode: center
|
||||
---
|
||||
|
||||
[Upstash](https://upstash.com/) is a fully managed Redis database as a service. Upstash works with the Redis® API, which means you can use Bun's native Redis client to connect to your Upstash database.
|
||||
|
||||
<Note>TLS is enabled by default for all Upstash Redis databases.</Note>
|
||||
|
||||
---
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a new project">
|
||||
Create a new project by running `bun init`:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun init bun-upstash-redis
|
||||
cd bun-upstash-redis
|
||||
```
|
||||
</Step>
|
||||
<Step title="Create an Upstash Redis database">
|
||||
Go to the [Upstash dashboard](https://console.upstash.com/) and create a new Redis database. After completing the [getting started guide](https://upstash.com/docs/redis/overall/getstarted), you'll see your database page with connection information.
|
||||
|
||||
The database page displays two connection methods: HTTP and TLS. For Bun's Redis client, you need the **TLS** connection details. This URL starts with `rediss://`.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
</Step>
|
||||
<Step title="Connect using Bun's Redis client">
|
||||
You can connect to Upstash by setting environment variables with Bun's default `redis` client.
|
||||
|
||||
Set the `REDIS_URL` environment variable in your `.env` file using the Redis endpoint (not the REST URL):
|
||||
|
||||
```ini .env icon="settings"
|
||||
REDIS_URL=rediss://********@********.upstash.io:6379
|
||||
```
|
||||
|
||||
Bun's Redis client reads connection information from `REDIS_URL` by default:
|
||||
|
||||
```ts index.ts icon="/icons/typescript.svg"
|
||||
import { redis } from "bun";
|
||||
|
||||
// Reads from process.env.REDIS_URL automatically
|
||||
await redis.set("counter", "0"); // [!code ++]
|
||||
```
|
||||
|
||||
Alternatively, you can create a custom client using `RedisClient`:
|
||||
|
||||
```ts index.ts icon="/icons/typescript.svg"
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const redis = new RedisClient(process.env.REDIS_URL); // [!code ++]
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Use the Redis client">
|
||||
You can now use the Redis client to interact with your Upstash Redis database:
|
||||
|
||||
```ts index.ts icon="/icons/typescript.svg"
|
||||
import { redis } from "bun";
|
||||
|
||||
// Get a value
|
||||
let counter = await redis.get("counter");
|
||||
|
||||
// Set a value if it doesn't exist
|
||||
if (!counter) {
|
||||
await redis.set("counter", "0");
|
||||
}
|
||||
|
||||
// Increment the counter
|
||||
await redis.incr("counter");
|
||||
|
||||
// Get the updated value
|
||||
counter = await redis.get("counter");
|
||||
console.log(counter);
|
||||
```
|
||||
```txt
|
||||
1
|
||||
```
|
||||
|
||||
The Redis client automatically handles connections in the background. No need to manually connect or disconnect for basic operations.
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
@@ -74,4 +74,4 @@ bunx --bun vite build
|
||||
|
||||
---
|
||||
|
||||
This is a stripped down guide to get you started with Vite + Bun. For more information, see the [Vite documentation](https://vite.dev/guide/).
|
||||
This is a stripped down guide to get you started with Vite + Bun. For more information, see the [Vite documentation](https://vitejs.dev/guide/).
|
||||
|
||||
@@ -6,7 +6,7 @@ mode: center
|
||||
|
||||
## Extract links from a webpage
|
||||
|
||||
Bun's [HTMLRewriter](/runtime/html-rewriter) API can be used to efficiently extract links from HTML content. It works by chaining together CSS selectors to match the elements, text, and attributes you want to process. This is a simple example of how to extract links from a webpage. You can pass `.transform` a `Response`, `Blob`, or `string`.
|
||||
Bun's [HTMLRewriter](https://bun.com/docs/api/html-rewriter) API can be used to efficiently extract links from HTML content. It works by chaining together CSS selectors to match the elements, text, and attributes you want to process. This is a simple example of how to extract links from a webpage. You can pass `.transform` a `Response`, `Blob`, or `string`.
|
||||
|
||||
```ts extract-links.ts icon="/icons/typescript.svg"
|
||||
async function extractLinks(url: string) {
|
||||
@@ -37,7 +37,6 @@ await extractLinks("https://bun.com");
|
||||
|
||||
When scraping websites, you often want to convert relative URLs (like `/docs`) to absolute URLs. Here's how to handle URL resolution:
|
||||
|
||||
{/* prettier-ignore */}
|
||||
```ts extract-links.ts icon="/icons/typescript.svg"
|
||||
async function extractLinksFromURL(url: string) {
|
||||
const response = await fetch(url);
|
||||
@@ -48,11 +47,13 @@ async function extractLinksFromURL(url: string) {
|
||||
const href = el.getAttribute("href");
|
||||
if (href) {
|
||||
// Convert relative URLs to absolute // [!code ++]
|
||||
try { // [!code ++]
|
||||
try {
|
||||
// [!code ++]
|
||||
const absoluteURL = new URL(href, url).href; // [!code ++]
|
||||
links.add(absoluteURL);
|
||||
} catch { // [!code ++]
|
||||
links.add(href); // [!code ++]
|
||||
links.add(absoluteURL); // [!code ++]
|
||||
} catch {
|
||||
// [!code ++]
|
||||
links.add(href);
|
||||
} // [!code ++]
|
||||
}
|
||||
},
|
||||
@@ -68,4 +69,4 @@ const websiteLinks = await extractLinksFromURL("https://example.com");
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > HTMLRewriter](/runtime/html-rewriter) for complete documentation on HTML transformation with Bun.
|
||||
See [Docs > API > HTMLRewriter](https://bun.com/docs/api/html-rewriter) for complete documentation on HTML transformation with Bun.
|
||||
|
||||
@@ -6,7 +6,7 @@ mode: center
|
||||
|
||||
## Extract social share images and Open Graph tags
|
||||
|
||||
Bun's [HTMLRewriter](/runtime/html-rewriter) API can be used to efficiently extract social share images and Open Graph metadata from HTML content. This is particularly useful for building link preview features, social media cards, or web scrapers. We can use HTMLRewriter to match CSS selectors to HTML elements, text, and attributes we want to process.
|
||||
Bun's [HTMLRewriter](https://bun.com/docs/api/html-rewriter) API can be used to efficiently extract social share images and Open Graph metadata from HTML content. This is particularly useful for building link preview features, social media cards, or web scrapers. We can use HTMLRewriter to match CSS selectors to HTML elements, text, and attributes we want to process.
|
||||
|
||||
```ts extract-social-meta.ts icon="/icons/typescript.svg"
|
||||
interface SocialMetadata {
|
||||
|
||||
@@ -63,9 +63,8 @@ Our form will send a `POST` request to the `/action` endpoint with the form data
|
||||
|
||||
First we use the [`.formData()`](https://developer.mozilla.org/en-US/docs/Web/API/Request/formData) method on the incoming `Request` to asynchronously parse its contents to a `FormData` instance. Then we can use the [`.get()`](https://developer.mozilla.org/en-US/docs/Web/API/FormData/get) method to extract the value of the `name` and `profilePicture` fields. Here `name` corresponds to a `string` and `profilePicture` is a `Blob`.
|
||||
|
||||
Finally, we write the `Blob` to disk using [`Bun.write()`](/runtime/file-io#writing-files-bun-write).
|
||||
Finally, we write the `Blob` to disk using [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write).
|
||||
|
||||
{/* prettier-ignore */}
|
||||
```ts index.ts icon="/icons/typescript.svg"
|
||||
const server = Bun.serve({
|
||||
port: 4000,
|
||||
@@ -81,7 +80,8 @@ const server = Bun.serve({
|
||||
});
|
||||
|
||||
// parse formdata at /action // [!code ++]
|
||||
if (url.pathname === "/action") { // [!code ++]
|
||||
if (url.pathname === "/action") {
|
||||
// [!code ++]
|
||||
const formdata = await req.formData(); // [!code ++]
|
||||
const name = formdata.get("name"); // [!code ++]
|
||||
const profilePicture = formdata.get("profilePicture"); // [!code ++]
|
||||
|
||||
@@ -4,7 +4,7 @@ sidebarTitle: Hot reload an HTTP server
|
||||
mode: center
|
||||
---
|
||||
|
||||
Bun supports the [`--hot`](/runtime/watch-mode#hot-mode) flag to run a file with hot reloading enabled. When any module or file changes, Bun re-runs the file.
|
||||
Bun supports the [`--hot`](https://bun.com/docs/runtime/hot#hot-mode) flag to run a file with hot reloading enabled. When any module or file changes, Bun re-runs the file.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun --hot run index.ts
|
||||
|
||||
@@ -6,7 +6,7 @@ mode: center
|
||||
|
||||
This starts an HTTP server listening on port `3000`. It demonstrates basic routing with a number of common responses and also handles POST data from standard forms or as JSON.
|
||||
|
||||
See [`Bun.serve`](/runtime/http/server) for details.
|
||||
See [`Bun.serve`](https://bun.com/docs/api/http) for details.
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
const server = Bun.serve({
|
||||
|
||||
@@ -6,7 +6,7 @@ mode: center
|
||||
|
||||
This starts an HTTP server listening on port `3000`. It responds to all requests with a `Response` with status `200` and body `"Welcome to Bun!"`.
|
||||
|
||||
See [`Bun.serve`](/runtime/http/server) for details.
|
||||
See [`Bun.serve`](https://bun.com/docs/api/http) for details.
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
const server = Bun.serve({
|
||||
|
||||
@@ -4,7 +4,7 @@ sidebarTitle: Stream file response
|
||||
mode: center
|
||||
---
|
||||
|
||||
This snippet reads a file from disk using [`Bun.file()`](/runtime/file-io#reading-files-bun-file). This returns a `BunFile` instance, which can be passed directly into the `new Response` constructor.
|
||||
This snippet reads a file from disk using [`Bun.file()`](https://bun.com/docs/api/file-io#reading-files-bun-file). This returns a `BunFile` instance, which can be passed directly into the `new Response` constructor.
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
const path = "/path/to/file.txt";
|
||||
@@ -32,7 +32,7 @@ new Response(Bun.file("./img.png")).headers.get("Content-Type");
|
||||
|
||||
---
|
||||
|
||||
Putting it all together with [`Bun.serve()`](/runtime/http/server).
|
||||
Putting it all together with [`Bun.serve()`](https://bun.com/docs/api/http#bun-serve).
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
// static file server
|
||||
@@ -47,4 +47,4 @@ Bun.serve({
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`.
|
||||
See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`.
|
||||
|
||||
@@ -4,7 +4,7 @@ sidebarTitle: Configure TLS
|
||||
mode: center
|
||||
---
|
||||
|
||||
Set the `tls` key to configure TLS. Both `key` and `cert` are required. The `key` should be the contents of your private key; `cert` should be the contents of your issued certificate. Use [`Bun.file()`](/runtime/file-io#reading-files-bun-file) to read the contents.
|
||||
Set the `tls` key to configure TLS. Both `key` and `cert` are required. The `key` should be the contents of your private key; `cert` should be the contents of your issued certificate. Use [`Bun.file()`](https://bun.com/docs/api/file-io#reading-files-bun-file) to read the contents.
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
const server = Bun.serve({
|
||||
|
||||
@@ -25,4 +25,4 @@ This will add the package to `devDependencies` in `package.json`.
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -33,8 +33,6 @@ bun add git@github.com:lodash/lodash.git
|
||||
bun add github:colinhacks/zod
|
||||
```
|
||||
|
||||
**Note:** GitHub dependencies download via HTTP tarball when possible for faster installation.
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -24,4 +24,4 @@ This will add the package to `optionalDependencies` in `package.json`.
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -17,7 +17,7 @@ This will add the package to `peerDependencies` in `package.json`.
|
||||
```json package.json icon="file-json"
|
||||
{
|
||||
"peerDependencies": {
|
||||
"@types/bun": "^1.3.3" // [!code ++]
|
||||
"@types/bun": "^1.3.1" // [!code ++]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -26,14 +26,14 @@ This will add the package to `peerDependencies` in `package.json`.
|
||||
|
||||
Running `bun install` will install peer dependencies by default, unless marked optional in `peerDependenciesMeta`.
|
||||
|
||||
{/* prettier-ignore */}
|
||||
```json package.json icon="file-json"
|
||||
{
|
||||
"peerDependencies": {
|
||||
"@types/bun": "^1.3.3"
|
||||
"@types/bun": "^1.3.1"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/bun": { // [!code ++]
|
||||
"@types/bun": {
|
||||
// [!code ++]
|
||||
"optional": true // [!code ++]
|
||||
} // [!code ++]
|
||||
}
|
||||
@@ -42,4 +42,4 @@ Running `bun install` will install peer dependencies by default, unless marked o
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -32,4 +32,4 @@ import { z } from "zod";
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -41,4 +41,4 @@ bun add zod@next
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -25,15 +25,15 @@ To use it with `bun install`, add a `bunfig.toml` file to your project with the
|
||||
[install.registry]
|
||||
url = "https://pkgs.dev.azure.com/my-azure-artifacts-user/_packaging/my-azure-artifacts-user/npm/registry"
|
||||
username = "my-azure-artifacts-user"
|
||||
# You can use an environment variable here
|
||||
# Bun v1.0.3+ supports using an environment variable here
|
||||
password = "$NPM_PASSWORD"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then assign your Azure Personal Access Token to the `NPM_PASSWORD` environment variable. Bun [automatically reads](/runtime/environment-variables) `.env` files, so create a file called `.env` in your project root. There is no need to base-64 encode this token! Bun will do this for you.
|
||||
Then assign your Azure Personal Access Token to the `NPM_PASSWORD` environment variable. Bun [automatically reads](https://bun.com/docs/runtime/env) `.env` files, so create a file called `.env` in your project root. There is no need to base-64 encode this token! Bun will do this for you.
|
||||
|
||||
```ini .env icon="settings"
|
||||
```txt .env icon="settings"
|
||||
NPM_PASSWORD=<paste token here>
|
||||
```
|
||||
|
||||
@@ -43,7 +43,7 @@ NPM_PASSWORD=<paste token here>
|
||||
|
||||
---
|
||||
|
||||
To configure Azure Artifacts without `bunfig.toml`, you can set the `NPM_CONFIG_REGISTRY` environment variable. The URL should include `:username` and `:_password` as query parameters. Replace `<USERNAME>` and `<PASSWORD>` with the appropriate values.
|
||||
To configure Azure Artifacts without `bunfig.toml`, you can set the `NPM_CONFIG_REGISTRY` environment variable. The URL should include `:username` and `:_password` as query parameters. Replace `<USERNAME>` and `<PASSWORD>` with the appropriate values.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
NPM_CONFIG_REGISTRY=https://pkgs.dev.azure.com/my-azure-artifacts-user/_packaging/my-azure-artifacts-user/npm/registry/:username=<USERNAME>:_password=<PASSWORD>
|
||||
|
||||
@@ -20,7 +20,7 @@ registry = "https://usertitle:password@registry.npmjs.org"
|
||||
|
||||
---
|
||||
|
||||
Your `bunfig.toml` can reference environment variables. Bun automatically loads environment variables from `.env.local`, `.env.[NODE_ENV]`, and `.env`. See [Docs > Environment variables](/runtime/environment-variables) for more information.
|
||||
Your `bunfig.toml` can reference environment variables. Bun automatically loads environment variables from `.env.local`, `.env.[NODE_ENV]`, and `.env`. See [Docs > Environment variables](https://bun.com/docs/runtime/env) for more information.
|
||||
|
||||
```toml bunfig.toml icon="settings"
|
||||
[install]
|
||||
@@ -29,4 +29,4 @@ registry = { url = "https://registry.npmjs.org", token = "$npm_token" }
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](/pm/cli/install) for complete documentation of Bun's package manager.
|
||||
See [Docs > Package manager](https://bun.com/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user