Compare commits


1 Commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Jarred Sumner | f94a8097ac | Add cleanup function to ResolvedSource | 2025-01-18 12:04:46 -08:00 |
3445 changed files with 158279 additions and 364625 deletions


@@ -1,12 +1,12 @@
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION


@@ -107,9 +107,9 @@ const buildPlatforms = [
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
];
@@ -134,9 +134,9 @@ const testPlatforms = [
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
];
@@ -323,7 +323,6 @@ function getCppAgent(platform, options) {
*/
function getZigAgent(platform, options) {
const { arch } = platform;
return {
queue: "build-zig",
};
@@ -384,22 +383,14 @@ function getBuildEnv(target, options) {
const { canary } = options;
const revision = typeof canary === "number" ? canary : 1;
const isMusl = abi === "musl";
let CMAKE_BUILD_TYPE = release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo";
if (isMusl && release) {
CMAKE_BUILD_TYPE = "MinSizeRel";
}
return {
CMAKE_BUILD_TYPE,
CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
CANARY_REVISION: revision,
ENABLE_ASSERTIONS: release ? "OFF" : "ON",
ENABLE_LOGS: release ? "OFF" : "ON",
ABI: isMusl ? "musl" : undefined,
CMAKE_TLS_VERIFY: "0",
ABI: abi === "musl" ? "musl" : undefined,
};
}
@@ -471,7 +462,6 @@ function getBuildZigStep(platform, options) {
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
timeout_in_minutes: 35,
};
}
@@ -628,21 +618,6 @@ function getReleaseStep(buildPlatforms, options) {
};
}
/**
* @returns {Step}
*/
function getBenchmarkStep() {
return {
key: "benchmark",
label: "📊",
agents: {
queue: "build-zig",
},
command: "bun .buildkite/scripts/upload-benchmark.ts",
depends_on: [`linux-x64-build-bun`],
};
}
/**
* @typedef {Object} Pipeline
* @property {Step[]} [steps]
@@ -1121,8 +1096,6 @@ async function getPipeline(options = {}) {
steps.push(getReleaseStep(buildPlatforms, options));
}
steps.push(getBenchmarkStep());
/** @type {Map<string, GroupStep>} */
const stepsByGroup = new Map();


@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";
console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
method: "POST",
});
console.log("Got status " + response.status);


@@ -158,36 +158,25 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_discord_announcement() {
local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
if [ -z "$value" ]; then
echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local version="$1"
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local short_sha="${commit:0:7}"
local commit_url="https://github.com/oven-sh/bun/commit/$commit"
local artifact_path="${commit}"
if [ "$version" == "canary" ]; then
local json_payload=$(cat <<EOF
{
"embeds": [{
"title": "New Bun Canary now available",
"description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
"color": 16023551,
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}]
}
EOF
)
curl -H "Content-Type: application/json" \
-d "$json_payload" \
-sf \
"$value" >/dev/null
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
@@ -201,8 +190,6 @@ function create_release() {
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
@@ -240,7 +227,7 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_discord_announcement "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {


@@ -6,3 +6,6 @@ CompileFlags:
Diagnostics:
UnusedIncludes: None
HeaderInsertion:
IncludeBlocks: Preserve # Do not auto-include headers.


@@ -1,27 +0,0 @@
---
description: How to build Bun
globs:
---
# How to build Bun
## CMake
Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.
* `CMakeLists.txt`
* `cmake/`
* `Globals.cmake` - macros and functions used by all the other files
* `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
* `CompilerFlags.cmake` - compiler and linker flags used by all the targets
* `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
* `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)
## How to
There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:
```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```


@@ -1,139 +0,0 @@
---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---
# Writing HMR/Dev Server tests
Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
## File Structure
- `test/bake/bake-harness.ts` - shared utilities and test harness
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
- class `Dev` (controls subprocess for dev server)
- class `Client` (controls a happy-dom subprocess for having the page open)
- more helpers
- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. it loads a page and uses IPC to query parts of the page, run javascript, and much more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test on dev and production mode. these tests cannot really test hot reloading for obvious reasons.
## Categories
bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
css.test.ts - CSS tests concern bundling bugs with CSS files
plugins.test.ts - Plugin tests concern plugins in development mode.
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
esm.test.ts - ESM tests are about various esm features in development mode.
html.test.ts - HTML tests are tests relating to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source-maps are correct.
## `devTest` Basics
A test takes in two primary inputs: `files` and `async test(dev) {`
```ts
import { devTest, emptyHtmlFile } from "../bake-harness";
devTest("html file is watched", {
files: {
"index.html": emptyHtmlFile({
scripts: ["/script.ts"],
body: "<h1>Hello</h1>",
}),
"script.ts": `
console.log("hello");
`,
},
async test(dev) {
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.patch("index.html", {
find: "Hello",
replace: "World",
});
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
// Works
await using c = await dev.client("/");
await c.expectMessage("hello");
// Editing HTML reloads
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "World",
replace: "Hello",
});
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "Hello",
replace: "Bar",
});
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("script.ts", {
find: "hello",
replace: "world",
});
});
await c.expectMessage("world");
},
});
```
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
The functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs; the dev server helpers are hooked to wait for the hot reload and for all connected clients to receive the changes.
When a change performs a hard reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
Clients have `console.log` instrumented, so any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs.
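For instance, when one hot update re-evaluates two modules that each log, both messages are asserted in a single call (a minimal sketch; the file name and log strings are illustrative, not from a real test):
```ts
// Sketch: a patch that causes two console.log calls in one hot update.
await dev.patch("script.ts", { find: "hello", replace: "hello again" });
// Assert both logs at once instead of calling expectMessage twice.
await c.expectMessage("hello again", "dependency re-evaluated");
```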
## Testing for bundling errors
By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
```ts
devTest("import then create", {
files: {
"index.html": `
<!DOCTYPE html>
<html>
<head></head>
<body>
<script type="module" src="/script.ts"></script>
</body>
</html>
`,
"script.ts": `
import data from "./data";
console.log(data);
`,
},
async test(dev) {
const c = await dev.client("/", {
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
});
await c.expectReload(async () => {
await dev.write("data.ts", "export default 'data';");
});
await c.expectMessage("data");
},
});
```
Many functions take an options value that lets you specify the errors they are expected to produce. For example, this delete is going to cause a resolution failure.
```ts
await dev.delete("other.ts", {
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```


@@ -1,408 +0,0 @@
---
description: JavaScript class implemented in C++
globs: *.cpp
---
# Implementing JavaScript classes in C++
If there is a publicly accessible Constructor and Prototype, then there are 3 classes:
- If there are C++ class members, we need a destructor, so use `class Foo : public JSC::DestructibleObject`. If there are no C++ class fields (only JS properties), we usually don't need a class at all; we can instead use `JSC::constructEmptyObject(vm, structure)` and `putDirectOffset` like in [NodeFSBinding.cpp](mdc:src/bun.js/bindings/NodeFSBinding.cpp).
- class FooPrototype : public JSC::JSNonFinalObject
- class FooConstructor : public JSC::InternalFunction
If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).
If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
Generally, defining the subspace looks like this:
```c++
class Foo : public JSC::DestructibleObject {
// ...
template<typename MyClassT, JSC::SubspaceAccess mode>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
return nullptr;
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
vm,
[](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
[](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
}
```
It's better to put it in the .cpp file instead of the .h file, when possible.
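The additions to those two headers are not shown in this rule. As a rough sketch of what they look like, assuming the new entries follow the `std::unique_ptr` member pattern of the existing ones (`Foo` is a placeholder class name):
```c++
// DOMIsoSubspaces.h (sketch): one subspace member per class with C++ fields.
std::unique_ptr<JSC::IsoSubspace> m_subspaceForFoo;

// DOMClientIsoSubspaces.h (sketch): the matching client-side subspace member.
std::unique_ptr<JSC::GCClient::IsoSubspace> m_clientSubspaceForFoo;
```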
## Defining properties
Define properties on the prototype. Use a const HashTableValues like this:
```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);
static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
{ "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
{ "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
{ "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
{ "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
{ "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
{ "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
{ "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
{ "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
{ "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
{ "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
{ "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
{ "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
{ "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
{ "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
{ "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
{ "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
{ "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
{ "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
{ "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
{ "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
{ "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
{ "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
{ "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
{ "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
{ "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
{ "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
};
```
### Creating a prototype class
Follow a pattern like this:
```c++
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
public:
using Base = JSC::JSNonFinalObject;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
{
JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
prototype->finishCreation(vm);
return prototype;
}
template<typename, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.plainObjectSpace();
}
DECLARE_INFO;
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
structure->setMayBePrototype(true);
return structure;
}
private:
JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure)
{
}
void finishCreation(JSC::VM& vm);
};
const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };
void JSX509CertificatePrototype::finishCreation(VM& vm)
{
Base::finishCreation(vm);
reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}
} // namespace Bun
```
### Getter definition:
```C++
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
return {};
}
return JSValue::encode(jsBoolean(thisObject->view().isCA()));
}
```
### Setter definition
```C++
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
{
ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject))
return false;
JSValue value = JSValue::decode(encodedValue);
if (!value.isCell()) {
// TODO:
return true;
}
thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
return true;
}
```
### Function definition
```C++
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
return {};
}
return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
}
```
### Constructor definition
```C++
JSC_DECLARE_HOST_FUNCTION(callStats);
JSC_DECLARE_HOST_FUNCTION(constructStats);
class JSStatsConstructor final : public JSC::InternalFunction {
public:
using Base = JSC::InternalFunction;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
{
JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
constructor->finishCreation(vm, prototype);
return constructor;
}
DECLARE_INFO;
template<typename CellType, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.internalFunctionSpace();
}
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
}
private:
JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure, callStats, constructStats)
{
}
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
{
Base::finishCreation(vm, 0, "Stats"_s);
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
}
};
```
### Structure caching
If there's a class, prototype, and constructor:
1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
// ...
m_JSStatsBigIntClassStructure.initLater(
[](LazyClassStructure::Initializer& init) {
// Call the function to initialize our class structure.
Bun::initJSBigIntStatsClassStructure(init);
});
```
Then, implement the function that creates the structure:
```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());
auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
init.setPrototype(prototype);
init.setStructure(structure);
init.setConstructor(constructor);
}
```
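Steps 1 and 3 of that list are not illustrated above. A rough sketch of both, reusing the member name from the `finishCreation` example (the surrounding declarations and helper calls are assumptions, not copied from the real files):
```c++
// Step 1, in ZigGlobalObject.h (sketch): declare the lazy class structure as a member.
JSC::LazyClassStructure m_JSStatsBigIntClassStructure;

// Step 3, in ZigGlobalObject.cpp (sketch): visit it so the structure, prototype,
// and constructor it owns stay alive during GC.
template<typename Visitor>
void GlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor)
{
    auto* thisObject = jsCast<GlobalObject*>(cell);
    Base::visitChildren(thisObject, visitor);
    // ... other visits ...
    thisObject->m_JSStatsBigIntClassStructure.visit(visitor);
    // JSC::LazyProperty members (the class-only case below) are visited the same way.
}
```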
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
```c++
void GlobalObject::finishCreation(VM& vm) {
// ...
m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
});
}
```
Then, implement the function that creates the structure:
```c++
Structure* setupX509CertificateStructure(JSC::VM& vm, Zig::GlobalObject* globalObject)
{
// If there is a prototype:
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(vm, globalObject, globalObject->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(vm, globalObject, prototypeStructure);
// If there is no prototype, use globalObject->objectPrototype() in place of `prototype` below.
auto* structure = JSX509Certificate::createStructure(vm, globalObject, prototype);
return structure;
}
```
Then, use the structure by calling `globalObject->m_myStructureName.get(globalObject)`
```C++
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
if (!callFrame->argumentCount()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
return {};
}
JSValue arg = callFrame->uncheckedArgument(0);
if (!arg.isCell()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
return {};
}
auto* zigGlobalObject = defaultGlobalObject(globalObject);
Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
JSValue newTarget = callFrame->newTarget();
if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
auto scope = DECLARE_THROW_SCOPE(vm);
if (!newTarget) {
throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
return {};
}
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
RETURN_IF_EXCEPTION(scope, {});
structure = InternalFunction::createSubclassStructure(
globalObject, newTarget.getObject(), functionGlobalObject->m_JSX509CertificateClassStructure.get(functionGlobalObject));
scope.release();
}
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
}
```
### Expose to Zig
To expose the constructor to zig:
```c++
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
{
return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
}
```
Zig:
```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```
To create an object (instance) of a JS class defined in C++ from Zig, follow the __toJS convention like this:
```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
{
// ... implementation details
auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
}
```
And from Zig:
```zig
const X509 = opaque {
// ... class
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
return Bun__X509__toJS(globalObject, this);
}
};
```


@@ -1,91 +0,0 @@
---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun
## Where tests are found
You'll find all of Bun's tests in the `test/` directory.
* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
* `regression/issue/[number]` - Regression tests; always add one when fixing a particular issue
## How tests are written
Bun's tests are written as JavaScript and TypeScript files using Jest-style APIs, like `test`, `describe`, and `expect`. They are run with Bun's own test runner, `bun test`.
```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";
describe("assert(expr)", () => {
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
expect(() => assert(expr)).not.toThrow();
});
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
expect(() => assert(expr)).toThrow(AssertionError);
});
});
```
## Testing conventions
* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge-cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:
```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";
test("bun --version", async () => {
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
cmd: [bunExe(), "--version"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [ exitCode, stdout, stderr ] = await Promise.all([
exited,
new Response(stdoutStream).text(),
new Response(stderrStream).text(),
]);
expect({ exitCode, stdout, stderr }).toMatchObject({
exitCode: 0,
stdout: expect.stringContaining(Bun.version),
stderr: "",
});
});
```
## Before writing a test
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun; a sketch of this workflow follows this list
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code, verify that it works as you expect in Node.js, then verify that it fails (as expected) with the canary version of Bun
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file
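A minimal sketch of that "test first" workflow for a regression fix, following the `test/regression/issue/[number]` convention above (the issue number and the asserted behaviour are placeholders, not a real issue):
```ts
// test/regression/issue/09999.test.ts (hypothetical issue number)
import { test, expect } from "bun:test";

test("#09999: toReversed() does not mutate the original array", () => {
  // Written before the fix: confirm it fails with the canary build, then passes after.
  const input = [1, 2, 3];
  expect(input.toReversed()).toEqual([3, 2, 1]);
  expect(input).toEqual([1, 2, 3]);
});
```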

.gitattributes (vendored): 1 line changed

@@ -15,7 +15,6 @@
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2


@@ -4,7 +4,7 @@ description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
baseline:


@@ -10,9 +10,9 @@ on:
merge_group:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
BUN_VERSION: "1.1.44"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-format:


@@ -10,9 +10,9 @@ on:
merge_group:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
BUN_VERSION: "1.1.44"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-tidy:


@@ -5,7 +5,7 @@ on:
workflow_dispatch:
env:
BUN_VERSION: "1.2.0"
BUN_VERSION: "1.1.44"
OXLINT_VERSION: "0.15.0"
jobs:


@@ -1,55 +0,0 @@
name: Packages CI
on:
push:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
pull_request:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
env:
BUN_VERSION: "canary"
jobs:
bun-plugin-svelte:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install dependencies
run: |
bun install
pushd ./packages/bun-plugin-svelte && bun install
- name: Lint
run: |
bunx oxlint@0.15 --format github --deny-warnings
bunx prettier --config ../../.prettierrc --check .
working-directory: ./packages/bun-plugin-svelte
- name: Check types
run: bun check:types
working-directory: ./packages/bun-plugin-svelte
- name: Test
run: bun test
working-directory: ./packages/bun-plugin-svelte


@@ -10,7 +10,7 @@ on:
merge_group:
env:
BUN_VERSION: "1.2.0"
BUN_VERSION: "1.1.44"
jobs:
prettier-format:


@@ -44,10 +44,6 @@ on:
description: Should types be released to npm?
type: boolean
default: false
use-definitelytyped:
description: "Should types be PR'd to DefinitelyTyped?"
type: boolean
default: false
jobs:
sign:
@@ -70,7 +66,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.1.44"
- name: Install Dependencies
run: bun install
- name: Sign Release
@@ -98,7 +94,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.1.44"
- name: Install Dependencies
run: bun install
- name: Release
@@ -127,7 +123,7 @@ jobs:
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.1.44"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
@@ -159,52 +155,6 @@ jobs:
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
definitelytyped:
name: Make pr to DefinitelyTyped to update `bun-types` version
runs-on: ubuntu-latest
needs: npm-types
if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
permissions:
contents: read
steps:
- name: Checkout (DefinitelyTyped)
uses: actions/checkout@v4
with:
repository: DefinitelyTyped/DefinitelyTyped
- name: Checkout (bun)
uses: actions/checkout@v4
with:
path: bun
- name: Setup Bun
uses: ./bun/.github/actions/setup-bun
with:
bun-version: "1.2.0"
- id: bun-version
run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
- name: Update bun-types version in package.json
run: |
bun -e '
const file = Bun.file("./types/bun/package.json");
const json = await file.json();
const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
json.dependencies["bun-types"] = version;
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
await file.write(JSON.stringify(json, null, 4) + "\n");
'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary'}}
with:
token: ${{ secrets.ROBOBUN_TOKEN }}
add-paths: ./types/bun/package.json
title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
body: |
Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}
https://bun.sh/blog/${{ env.BUN_VERSION }}
push-to-fork: oven-sh/DefinitelyTyped
branch: ${{env.BUN_VERSION}}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
@@ -231,7 +181,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
@@ -239,7 +189,7 @@ jobs:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
@@ -256,7 +206,7 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
@@ -315,7 +265,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.0"
bun-version: "1.1.44"
- name: Install Dependencies
run: bun install
- name: Release
@@ -359,7 +309,7 @@ jobs:
uses: ./.github/actions/setup-bun
if: ${{ env.BUN_LATEST == 'true' }}
with:
bun-version: "1.2.0"
bun-version: "1.1.44"
- name: Bump version
uses: ./.github/actions/bump
if: ${{ env.BUN_LATEST == 'true' }}


@@ -3,7 +3,8 @@ name: Lint
permissions:
contents: read
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: 16
BUN_VERSION: "1.1.44"
on:
workflow_call:


@@ -9,7 +9,7 @@ on:
required: true
env:
BUN_VERSION: "1.2.0"
BUN_VERSION: "1.1.44"
jobs:
bump:


@@ -89,6 +89,4 @@ jobs:
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)


@@ -89,6 +89,4 @@ jobs:
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)


@@ -89,6 +89,4 @@ jobs:
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)


@@ -89,6 +89,4 @@ jobs:
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)


@@ -89,6 +89,4 @@ jobs:
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)


@@ -1,82 +0,0 @@
name: Daily Root Certs Update Check
on:
schedule:
- cron: "0 0 * * *" # Runs at 00:00 UTC every day
workflow_dispatch: # Allows manual trigger
jobs:
check-and-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Generate root certs and capture output
id: generate-certs
run: |
cd packages/bun-usockets/
OUTPUT=$(bun generate-root-certs.mjs -v)
echo "cert_output<<EOF" >> $GITHUB_ENV
echo "$OUTPUT" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Check for changes and stage files
id: check-changes
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "Found changes, staging modified files..."
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
# Get list of modified files and add them
git status --porcelain | while read -r status file; do
# Remove leading status and whitespace
file=$(echo "$file" | sed 's/^.* //')
echo "Adding changed file: $file"
git add "$file"
done
echo "changes=true" >> $GITHUB_OUTPUT
# Store the list of changed files
CHANGED_FILES=$(git status --porcelain)
echo "changed_files<<EOF" >> $GITHUB_ENV
echo "$CHANGED_FILES" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No changes detected"
echo "changes=false" >> $GITHUB_OUTPUT
fi
- name: Create Pull Request
if: steps.check-changes.outputs.changes == 'true'
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
title: "update(root_certs) $(date +'%Y-%m-%d')"
body: |
Automated root certificates update
${{ env.cert_output }}
## Changed Files:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels:
- "automation"
- "root-certs"


@@ -55,7 +55,7 @@ jobs:
# Convert numeric version to semantic version for display
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 10000) % 100))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
@@ -106,6 +106,4 @@ jobs:
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)


@@ -10,7 +10,7 @@ on:
merge_group:
env:
BUN_VERSION: "1.2.0"
BUN_VERSION: "1.1.44"
jobs:
zig-format:

.gitignore (vendored): 6 lines changed

@@ -16,7 +16,6 @@
.vscode/clang*
.vscode/cpp*
.zig-cache
.bake-debug
*.a
*.bc
*.big
@@ -152,7 +151,6 @@ src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
# Dependencies
/vendor
@@ -180,6 +178,4 @@ tmp
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
*.bun-build
scratch*.{js,ts,tsx,cjs,mjs}


@@ -1,16 +1,4 @@
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
# command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import misctools/lldb/lldb_webkit.py
command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"


@@ -7,4 +7,3 @@ src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
bun.lock

.vscode/launch.json (generated, vendored): 51 lines changed

@@ -22,6 +22,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -37,6 +38,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -58,6 +60,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -73,6 +76,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -88,6 +92,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -103,6 +108,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -119,6 +125,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -140,6 +147,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -161,6 +169,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -179,6 +188,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -193,6 +203,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -210,6 +221,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -224,6 +236,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -240,6 +253,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -261,6 +275,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -282,6 +297,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -297,6 +313,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -312,6 +329,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -327,6 +345,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -342,6 +361,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -358,6 +378,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -379,6 +400,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -399,6 +421,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// bun test [*]
{
@@ -414,6 +437,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -428,6 +452,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -443,6 +468,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -462,6 +488,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -476,6 +503,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// Windows: bun test [file]
{
@@ -708,10 +736,6 @@
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
},
{
"name": "BUN_DEBUG_SYS",
"value": "1",
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2",
@@ -1101,24 +1125,7 @@
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
},
{
"type": "bun",
"name": "[JS] bun test [file]",
"runtime": "${workspaceFolder}/build/debug/bun-debug",
"runtimeArgs": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
},
{
"type": "midas-rr",
"request": "attach",
"name": "rr",
"trace": "Off",
"setupCommands": ["handle SIGPWR nostop noprint pass"],
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
],
"inputs": [


@@ -36,7 +36,6 @@
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {
@@ -64,7 +63,6 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["--header-insertion=never"],
// JavaScript
"prettier.enable": true,
@@ -141,11 +139,8 @@
"packages/bun-uws/fuzzing": true,
},
"files.associations": {
"*.css": "tailwindcss",
"*.idl": "cpp",
"*.mdc": "markdown",
"array": "cpp",
"ios": "cpp",
},
"C_Cpp.files.exclude": {
"**/.vscode": true,


@@ -12,12 +12,6 @@ list(APPEND CMAKE_MODULE_PATH
include(Policies)
include(Globals)
if (CMAKE_HOST_WIN32)
# Workaround for TLS certificate verification issue on Windows when downloading from GitHub
# Remove this once we've bumped the CI machines build image
set(CMAKE_TLS_VERIFY 0)
endif()
# --- Compilers ---
if(CMAKE_HOST_APPLE)


@@ -67,7 +67,7 @@ $ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
```
```bash#Arch
$ sudo pacman -S llvm clang18 lld
$ sudo pacman -S llvm clang lld
```
```bash#Fedora
@@ -80,7 +80,7 @@ $ sudo zypper install clang18 lld18 llvm18
{% /codetabs %}
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
Make sure Clang/LLVM 18 is in your path:
@@ -205,30 +205,18 @@ WebKit is not cloned by default (to save time and disk space). To clone and buil
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
$ git -C vendor/WebKit checkout <commit_hash>
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
$ make jsc-debug
# Build bun with the local JSC build
$ bun run build:local
```
Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`), so you'll have to change a couple of places to use this new directory:
- The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
- The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
- In [`build.zig`](/build.zig), the `codegen_path` option should be `build/debug-local/codegen` (instead of `build/debug/codegen`)
- In [`.vscode/launch.json`](/.vscode/launch.json), many configurations use `./build/debug/`; change them as you see fit
Note that the WebKit folder, including build artifacts, is 8GB+ in size.
If you are using a JSC debug build in VS Code, make sure to run the `C/C++: Select a Configuration` command to configure IntelliSense to find the debug headers.
Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the new commit hash.
## Troubleshooting
### 'span' file not found on Ubuntu

LATEST
View File

@@ -1 +1 @@
1.2.5
1.1.44

View File

@@ -91,9 +91,9 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-19 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-19 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-19 2>/dev/null || which clang-format 2>/dev/null)
REAL_CC = $(shell which clang-16 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-16 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-16 2>/dev/null || which clang-format 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -117,14 +117,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm@19)
LLVM_PREFIX = $(shell brew --prefix llvm@16)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@19' or set LLVM_PREFIX=/path/to/llvm")
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@16' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -164,7 +164,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-19-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
@@ -191,7 +191,7 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
AR = $(shell which llvm-ar-19 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
AR = $(shell which llvm-ar-16 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
@@ -255,7 +255,7 @@ DEFAULT_LINKER_FLAGS= -pthread -ldl
endif
ifeq ($(OS_NAME),darwin)
_MIMALLOC_OBJECT_FILE = 0
JSC_BUILD_STEPS += jsc-build-mac
JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
JSC_BUILD_STEPS_DEBUG += jsc-build-mac-debug
_MIMALLOC_FILE = libmimalloc.a
_MIMALLOC_INPUT_PATH = libmimalloc.a
@@ -286,7 +286,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-19 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-16 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
endif
@@ -674,7 +674,7 @@ endif
.PHONY: assert-deps
assert-deps:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "19" ]; then echo -e "ERROR: clang version >=19 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@19"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@16"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@@ -924,7 +924,7 @@ bun-codesign-release-local-debug:
.PHONY: jsc
jsc: jsc-build
jsc: jsc-build jsc-copy-headers jsc-bindings
.PHONY: jsc-debug
jsc-debug: jsc-build-debug
.PHONY: jsc-build
@@ -1154,7 +1154,7 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
@@ -1205,7 +1205,7 @@ jsc-copy-headers-debug:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
@@ -1261,7 +1261,6 @@ jsc-build-mac-compile:
-DBUN_FAST_TLS=ON \
-DENABLE_FTL_JIT=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
@@ -1285,7 +1284,6 @@ jsc-build-mac-compile-lto:
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DCMAKE_C_FLAGS="-flto=full" \
-DCMAKE_CXX_FLAGS="-flto=full" \
-DENABLE_FTL_JIT=ON \
@@ -1301,7 +1299,6 @@ jsc-build-mac-compile-lto:
.PHONY: jsc-build-mac-compile-debug
jsc-build-mac-compile-debug:
mkdir -p $(WEBKIT_DEBUG_DIR) $(WEBKIT_DIR);
# to disable asan, remove -DENABLE_SANITIZERS=address and add -DENABLE_MALLOC_HEAP_BREAKDOWN=ON
cd $(WEBKIT_DEBUG_DIR) && \
ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" \
cmake \
@@ -1310,9 +1307,9 @@ jsc-build-mac-compile-debug:
-DCMAKE_BUILD_TYPE=Debug \
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DENABLE_MALLOC_HEAP_BREAKDOWN=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-G Ninja \
@@ -1321,7 +1318,6 @@ jsc-build-mac-compile-debug:
-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DUSE_VISIBILITY_ATTRIBUTE=1 \
-DENABLE_SANITIZERS=address \
$(WEBKIT_DIR) \
$(WEBKIT_DEBUG_DIR) && \
CFLAGS="$(CFLAGS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -ffat-lto-objects" \
@@ -1338,7 +1334,6 @@ jsc-build-linux-compile-config:
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
-DUSE_THIN_ARCHIVES=OFF \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
@@ -1362,7 +1357,6 @@ jsc-build-linux-compile-config-debug:
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
-DUSE_THIN_ARCHIVES=OFF \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
@@ -1381,14 +1375,14 @@ jsc-build-linux-compile-config-debug:
jsc-build-linux-compile-build:
mkdir -p $(WEBKIT_RELEASE_DIR) && \
cd $(WEBKIT_RELEASE_DIR) && \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON -DUSE_BUN_EVENT_LOOP=ON" \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
.PHONY: jsc-build-linux-compile-build-debug
jsc-build-linux-compile-build-debug:
mkdir -p $(WEBKIT_DEBUG_DIR) && \
cd $(WEBKIT_DEBUG_DIR) && \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON -DUSE_BUN_EVENT_LOOP=ON" \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
cmake --build $(WEBKIT_DEBUG_DIR) --config Debug --target jsc

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -1,53 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Pre-generate DH params to avoid including setup in benchmarks
const dhSize = 1024; // Reduced from 2048 for faster testing
const dh = crypto.createDiffieHellman(dhSize);
const dhPrime = dh.getPrime();
const dhGenerator = dh.getGenerator();
// Classical Diffie-Hellman
bench("DH - generateKeys", () => {
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
return alice.generateKeys();
});
bench("DH - computeSecret", () => {
// Setup
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
const aliceKey = alice.generateKeys();
const bob = crypto.createDiffieHellman(dhPrime, dhGenerator);
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with prime256v1 (P-256)
bench("ECDH-P256 - generateKeys", () => {
const ecdh = crypto.createECDH("prime256v1");
return ecdh.generateKeys();
});
bench("ECDH-P256 - computeSecret", () => {
// Setup
const alice = crypto.createECDH("prime256v1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("prime256v1");
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with secp384r1 (P-384)
bench("ECDH-P384 - computeSecret", () => {
const alice = crypto.createECDH("secp384r1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("secp384r1");
const bobKey = bob.generateKeys();
return alice.computeSecret(bobKey);
});
await run();

View File

@@ -1,44 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
function generateTestKeyPairs() {
const curves = crypto.getCurves();
const keys = {};
for (const curve of curves) {
const ecdh = crypto.createECDH(curve);
ecdh.generateKeys();
keys[curve] = {
compressed: ecdh.getPublicKey("hex", "compressed"),
uncompressed: ecdh.getPublicKey("hex", "uncompressed"),
instance: ecdh,
};
}
return keys;
}
const testKeys = generateTestKeyPairs();
bench("ECDH key format - P256 compressed to uncompressed", () => {
const publicKey = testKeys["prime256v1"].compressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P256 uncompressed to compressed", () => {
const publicKey = testKeys["prime256v1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "compressed");
});
bench("ECDH key format - P384 compressed to uncompressed", () => {
const publicKey = testKeys["secp384r1"].compressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P384 uncompressed to compressed", () => {
const publicKey = testKeys["secp384r1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "compressed");
});
await run();

View File

@@ -1,50 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Sample keys with different lengths
const keys = {
short: "secret",
long: "this-is-a-much-longer-secret-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
};
// Test parameters
const salts = ["", "salt"];
const infos = ["", "info"];
const hashes = ["sha256", "sha512"];
const sizes = [10, 1024];
// Benchmark sync HKDF
for (const hash of hashes) {
for (const keyName of Object.keys(keys)) {
const key = keys[keyName];
for (const size of sizes) {
bench(`hkdfSync ${hash} ${keyName}-key ${size} bytes`, () => {
return crypto.hkdfSync(hash, key, "salt", "info", size);
});
}
}
}
// Benchmark different combinations of salt and info
for (const salt of salts) {
for (const info of infos) {
bench(`hkdfSync sha256 with ${salt ? "salt" : "no-salt"} and ${info ? "info" : "no-info"}`, () => {
return crypto.hkdfSync("sha256", "secret", salt, info, 64);
});
}
}
// Benchmark async HKDF (using promises for cleaner benchmark)
// Note: async benchmarks in Mitata require returning a Promise
for (const hash of hashes) {
bench(`hkdf ${hash} async`, async () => {
return new Promise((resolve, reject) => {
crypto.hkdf(hash, "secret", "salt", "info", 64, (err, derivedKey) => {
if (err) reject(err);
else resolve(derivedKey);
});
});
});
}
await run();

View File

@@ -1,43 +0,0 @@
import { checkPrime, checkPrimeSync, generatePrime, generatePrimeSync } from "node:crypto";
import { bench, run } from "../runner.mjs";
const prime512 = generatePrimeSync(512);
const prime2048 = generatePrimeSync(2048);
bench("checkPrimeSync 512", () => {
return checkPrimeSync(prime512);
});
bench("checkPrimeSync 2048", () => {
return checkPrimeSync(prime2048);
});
bench("checkPrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime512, resolve)));
await Promise.all(promises);
});
bench("checkPrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime2048, resolve)));
await Promise.all(promises);
});
bench("generatePrimeSync 512", () => {
return generatePrimeSync(512);
});
bench("generatePrimeSync 2048", () => {
return generatePrimeSync(2048);
});
bench("generatePrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(512, resolve)));
await Promise.all(promises);
});
bench("generatePrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(2048, resolve)));
await Promise.all(promises);
});
await run();

View File

@@ -1,50 +0,0 @@
import crypto from "crypto";
import { bench, run } from "../runner.mjs";
bench("randomInt - sync", () => {
crypto.randomInt(1000);
});
bench("randomInt - async", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomInt(1000, () => {
resolve();
});
await promise;
});
bench("randonBytes - 32", () => {
crypto.randomBytes(32);
});
bench("randomBytes - 256", () => {
crypto.randomBytes(256);
});
const buf = Buffer.alloc(256);
bench("randomFill - 32", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 32, () => {
resolve();
});
await promise;
});
bench("randomFill - 256", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 256, () => {
resolve();
});
await promise;
});
bench("randomFillSync - 32", () => {
crypto.randomFillSync(buf, 0, 32);
});
bench("randomFillSync - 256", () => {
crypto.randomFillSync(buf, 0, 256);
});
await run();

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "expect-to-equal",
@@ -136,7 +136,7 @@
"@jest/console": ["@jest/console@29.4.3", "", { "dependencies": { "@jest/types": "^29.4.3", "@types/node": "*", "chalk": "^4.0.0", "jest-message-util": "^29.4.3", "jest-util": "^29.4.3", "slash": "^3.0.0" } }, "sha512-W/o/34+wQuXlgqlPYTansOSiBnuxrTv61dEVkA6HNmpcgHLUjfaUbdqt6oVvOzaawwo9IdW9QOtMgQ1ScSZC4A=="],
"@jest/core": ["@jest/core@29.4.3", "", { "dependencies": { "@jest/console": "^29.4.3", "@jest/reporters": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "ci-info": "^3.2.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^29.4.3", "jest-config": "^29.4.3", "jest-haste-map": "^29.4.3", "jest-message-util": "^29.4.3", "jest-regex-util": "^29.4.3", "jest-resolve": "^29.4.3", "jest-resolve-dependencies": "^29.4.3", "jest-runner": "^29.4.3", "jest-runtime": "^29.4.3", "jest-snapshot": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "jest-watcher": "^29.4.3", "micromatch": "^4.0.4", "pretty-format": "^29.4.3", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"] }, "sha512-56QvBq60fS4SPZCuM7T+7scNrkGIe7Mr6PVIXUpu48ouvRaWOFqRPV91eifvFM0ay2HmfswXiGf97NGUN5KofQ=="],
"@jest/core": ["@jest/core@29.4.3", "", { "dependencies": { "@jest/console": "^29.4.3", "@jest/reporters": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "ci-info": "^3.2.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^29.4.3", "jest-config": "^29.4.3", "jest-haste-map": "^29.4.3", "jest-message-util": "^29.4.3", "jest-regex-util": "^29.4.3", "jest-resolve": "^29.4.3", "jest-resolve-dependencies": "^29.4.3", "jest-runner": "^29.4.3", "jest-runtime": "^29.4.3", "jest-snapshot": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "jest-watcher": "^29.4.3", "micromatch": "^4.0.4", "pretty-format": "^29.4.3", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" } }, "sha512-56QvBq60fS4SPZCuM7T+7scNrkGIe7Mr6PVIXUpu48ouvRaWOFqRPV91eifvFM0ay2HmfswXiGf97NGUN5KofQ=="],
"@jest/environment": ["@jest/environment@29.4.3", "", { "dependencies": { "@jest/fake-timers": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "jest-mock": "^29.4.3" } }, "sha512-dq5S6408IxIa+lr54zeqce+QgI+CJT4nmmA+1yzFgtcsGK8c/EyiUb9XQOgz3BMKrRDfKseeOaxj2eO8LlD3lA=="],
@@ -148,7 +148,7 @@
"@jest/globals": ["@jest/globals@29.4.3", "", { "dependencies": { "@jest/environment": "^29.4.3", "@jest/expect": "^29.4.3", "@jest/types": "^29.4.3", "jest-mock": "^29.4.3" } }, "sha512-8BQ/5EzfOLG7AaMcDh7yFCbfRLtsc+09E1RQmRBI4D6QQk4m6NSK/MXo+3bJrBN0yU8A2/VIcqhvsOLFmziioA=="],
"@jest/reporters": ["@jest/reporters@29.4.3", "", { "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@jridgewell/trace-mapping": "^0.3.15", "@types/node": "*", "chalk": "^4.0.0", "collect-v8-coverage": "^1.0.0", "exit": "^0.1.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", "istanbul-reports": "^3.1.3", "jest-message-util": "^29.4.3", "jest-util": "^29.4.3", "jest-worker": "^29.4.3", "slash": "^3.0.0", "string-length": "^4.0.1", "strip-ansi": "^6.0.0", "v8-to-istanbul": "^9.0.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"] }, "sha512-sr2I7BmOjJhyqj9ANC6CTLsL4emMoka7HkQpcoMRlhCbQJjz2zsRzw0BDPiPyEFDXAbxKgGFYuQZiSJ1Y6YoTg=="],
"@jest/reporters": ["@jest/reporters@29.4.3", "", { "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@jridgewell/trace-mapping": "^0.3.15", "@types/node": "*", "chalk": "^4.0.0", "collect-v8-coverage": "^1.0.0", "exit": "^0.1.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", "istanbul-reports": "^3.1.3", "jest-message-util": "^29.4.3", "jest-util": "^29.4.3", "jest-worker": "^29.4.3", "slash": "^3.0.0", "string-length": "^4.0.1", "strip-ansi": "^6.0.0", "v8-to-istanbul": "^9.0.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" } }, "sha512-sr2I7BmOjJhyqj9ANC6CTLsL4emMoka7HkQpcoMRlhCbQJjz2zsRzw0BDPiPyEFDXAbxKgGFYuQZiSJ1Y6YoTg=="],
"@jest/schemas": ["@jest/schemas@29.4.3", "", { "dependencies": { "@sinclair/typebox": "^0.25.16" } }, "sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg=="],
@@ -384,13 +384,13 @@
"istanbul-reports": ["istanbul-reports@3.1.5", "", { "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" } }, "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w=="],
"jest": ["jest@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/types": "^29.4.3", "import-local": "^3.0.2", "jest-cli": "^29.4.3" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"], "bin": { "jest": "bin/jest.js" } }, "sha512-XvK65feuEFGZT8OO0fB/QAQS+LGHvQpaadkH5p47/j3Ocqq3xf2pK9R+G0GzgfuhXVxEv76qCOOcMb5efLk6PA=="],
"jest": ["jest@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/types": "^29.4.3", "import-local": "^3.0.2", "jest-cli": "^29.4.3" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "bin": { "jest": "bin/jest.js" } }, "sha512-XvK65feuEFGZT8OO0fB/QAQS+LGHvQpaadkH5p47/j3Ocqq3xf2pK9R+G0GzgfuhXVxEv76qCOOcMb5efLk6PA=="],
"jest-changed-files": ["jest-changed-files@29.4.3", "", { "dependencies": { "execa": "^5.0.0", "p-limit": "^3.1.0" } }, "sha512-Vn5cLuWuwmi2GNNbokPOEcvrXGSGrqVnPEZV7rC6P7ck07Dyw9RFnvWglnupSh+hGys0ajGtw/bc2ZgweljQoQ=="],
"jest-circus": ["jest-circus@29.4.3", "", { "dependencies": { "@jest/environment": "^29.4.3", "@jest/expect": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "chalk": "^4.0.0", "co": "^4.6.0", "dedent": "^0.7.0", "is-generator-fn": "^2.0.0", "jest-each": "^29.4.3", "jest-matcher-utils": "^29.4.3", "jest-message-util": "^29.4.3", "jest-runtime": "^29.4.3", "jest-snapshot": "^29.4.3", "jest-util": "^29.4.3", "p-limit": "^3.1.0", "pretty-format": "^29.4.3", "slash": "^3.0.0", "stack-utils": "^2.0.3" } }, "sha512-Vw/bVvcexmdJ7MLmgdT3ZjkJ3LKu8IlpefYokxiqoZy6OCQ2VAm6Vk3t/qHiAGUXbdbJKJWnc8gH3ypTbB/OBw=="],
"jest-cli": ["jest-cli@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/types": "^29.4.3", "chalk": "^4.0.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "import-local": "^3.0.2", "jest-config": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "prompts": "^2.0.1", "yargs": "^17.3.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"], "bin": { "jest": "bin/jest.js" } }, "sha512-PiiAPuFNfWWolCE6t3ZrDXQc6OsAuM3/tVW0u27UWc1KE+n/HSn5dSE6B2juqN7WP+PP0jAcnKtGmI4u8GMYCg=="],
"jest-cli": ["jest-cli@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/types": "^29.4.3", "chalk": "^4.0.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "import-local": "^3.0.2", "jest-config": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "prompts": "^2.0.1", "yargs": "^17.3.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "bin": { "jest": "bin/jest.js" } }, "sha512-PiiAPuFNfWWolCE6t3ZrDXQc6OsAuM3/tVW0u27UWc1KE+n/HSn5dSE6B2juqN7WP+PP0jAcnKtGmI4u8GMYCg=="],
"jest-config": ["jest-config@29.4.3", "", { "dependencies": { "@babel/core": "^7.11.6", "@jest/test-sequencer": "^29.4.3", "@jest/types": "^29.4.3", "babel-jest": "^29.4.3", "chalk": "^4.0.0", "ci-info": "^3.2.0", "deepmerge": "^4.2.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "jest-circus": "^29.4.3", "jest-environment-node": "^29.4.3", "jest-get-type": "^29.4.3", "jest-regex-util": "^29.4.3", "jest-resolve": "^29.4.3", "jest-runner": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "micromatch": "^4.0.4", "parse-json": "^5.2.0", "pretty-format": "^29.4.3", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" }, "peerDependencies": { "@types/node": "*", "ts-node": ">=9.0.0" }, "optionalPeers": ["ts-node"] }, "sha512-eCIpqhGnIjdUCXGtLhz4gdDoxKSWXKjzNcc5r+0S1GKOp2fwOipx5mRcwa9GB/ArsxJ1jlj2lmlD9bZAsBxaWQ=="],
@@ -600,7 +600,7 @@
"vite": ["vite@4.1.2", "", { "dependencies": { "esbuild": "^0.16.14", "fsevents": "~2.3.2", "postcss": "^8.4.21", "resolve": "^1.22.1", "rollup": "^3.10.0" }, "peerDependencies": { "@types/node": ">= 14", "less": "*", "sass": "*", "stylus": "*", "sugarss": "*", "terser": "^5.4.0" }, "optionalPeers": ["less", "sass", "stylus", "sugarss", "terser"], "bin": { "vite": "bin/vite.js" } }, "sha512-MWDb9Rfy3DI8omDQySbMK93nQqStwbsQWejXRY2EBzEWKmLAXWb1mkI9Yw2IJrc+oCvPCI1Os5xSSIBYY6DEAw=="],
"vitest": ["vitest@0.25.8", "", { "dependencies": { "@types/chai": "^4.3.4", "@types/chai-subset": "^1.3.3", "@types/node": "*", "acorn": "^8.8.1", "acorn-walk": "^8.2.0", "chai": "^4.3.7", "debug": "^4.3.4", "local-pkg": "^0.4.2", "source-map": "^0.6.1", "strip-literal": "^1.0.0", "tinybench": "^2.3.1", "tinypool": "^0.3.0", "tinyspy": "^1.0.2", "vite": "^3.0.0 || ^4.0.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@vitest/browser": "*", "@vitest/ui": "*", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-X75TApG2wZTJn299E/TIYevr4E9/nBo1sUtZzn0Ci5oK8qnpZAZyhwg0qCeMSakGIWtc6oRwcQFyFfW14aOFWg=="],
"vitest": ["vitest@0.25.8", "", { "dependencies": { "@types/chai": "^4.3.4", "@types/chai-subset": "^1.3.3", "@types/node": "*", "acorn": "^8.8.1", "acorn-walk": "^8.2.0", "chai": "^4.3.7", "debug": "^4.3.4", "local-pkg": "^0.4.2", "source-map": "^0.6.1", "strip-literal": "^1.0.0", "tinybench": "^2.3.1", "tinypool": "^0.3.0", "tinyspy": "^1.0.2", "vite": "^3.0.0 || ^4.0.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@vitest/browser": "*", "@vitest/ui": "*", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-X75TApG2wZTJn299E/TIYevr4E9/nBo1sUtZzn0Ci5oK8qnpZAZyhwg0qCeMSakGIWtc6oRwcQFyFfW14aOFWg=="],
"walker": ["walker@1.0.8", "", { "dependencies": { "makeerror": "1.0.12" } }, "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ=="],

View File

@@ -1,175 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
_.log
npm-debug.log_
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# Runtime data
pids
_.pid
_.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

View File

@@ -1,37 +0,0 @@
# express benchmark
This benchmarks a hello world express server.
To install dependencies:
```bash
bun install
```
To run in Bun:
```sh
bun ./express.mjs
```
To run in Node:
```sh
node ./express.mjs
```
To run in Deno:
```sh
deno run -A ./express.mjs
```
To benchmark each runtime:
```bash
oha http://localhost:3000 -n 500000 -H "Accept-Encoding: identity"
```
We recommend using `oha` or `bombardier` for benchmarking. We do not recommend using `ab`, as it uses HTTP/1.0, which stopped being used by web browsers in the early 2000s. We also do not recommend using `autocannon`, as the `node:http` client is not performant enough to measure the throughput of Bun's HTTP server.
Note that the `Accept-Encoding: identity` header exists to prevent Deno's HTTP server from compressing the response.

Binary file not shown.

View File

@@ -1,14 +0,0 @@
// See the README.md for more information
import express from "express";
const app = express();
const port = process.env.PORT || 3000;
let i = 0;
app.get("/", (req, res) => {
res.send("Hello World! (request number: " + i++ + ")");
});
app.listen(port, () => {
console.log(`Express server listening on port ${port}`);
});

View File

@@ -1,14 +0,0 @@
{
"name": "express",
"module": "index.ts",
"type": "module",
"devDependencies": {
"@types/bun": "latest"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"express": "5"
}
}

View File

@@ -1,27 +0,0 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -1,27 +1,19 @@
import { Glob } from "bun";
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
function benchPattern(name, glob, pattern) {
bench(name, () => {
new Glob(glob).match(pattern);
})
}
benchPattern("max-depth" , "1{2,3{4,5{6,7{8,9{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "13579bdfhjlm");
benchPattern("non-ascii", "😎/¢£.{ts,tsx,js,jsx}", "😎/¢£.jsx");
benchPattern("utf8", "フォルダ/**/*", "フォルダ/aaa.js");
benchPattern("non-ascii+max-depth" , "1{2,3{4,5{6,7{8,😎{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "1357😎bdfhjlm");
benchPattern("pretty-average", "test/{foo/**,bar}/baz", "test/bar/baz");
benchPattern("pretty-average-2", "a/**/c/*.md", "a/bb.bb/aa/b.b/aa/c/xyz.md");
benchPattern("pretty-average-3", "a/b/**/c{d,e}/**/xyz.md", "a/b/cd/xyz.md");
benchPattern("pretty-average-4", "foo/bar/**/one/**/*.*", "foo/bar/baz/one/two/three/image.png");
benchPattern("long-pretty-average", "some/**/needle.{js,tsx,mdx,ts,jsx,txt}", "some/a/bigger/path/to/the/crazy/needle.txt");
benchPattern("brackets-lots", "f[^eiu][^eiu][^eiu][^eiu][^eiu]r", "foo-bar");
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
doMatch("foo/bar.js", "**/*.js");
});
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
doMatch("bar.js", "*.js");
});
await run({
min_max: true,
percentiles: true,
avg: true,
})
avg: true,
min_max: true,
percentiles: true,
});

View File

@@ -1,19 +0,0 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
doMatch("foo/bar.js", "**/*.js");
});
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
doMatch("bar.js", "*.js");
});
await run({
avg: true,
min_max: true,
percentiles: true,
});

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "simple-react",
@@ -1045,7 +1045,7 @@
"ncp": ["ncp@2.0.0", "", { "bin": { "ncp": "./bin/ncp" } }, "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA=="],
"next": ["next@12.3.4", "", { "dependencies": { "@next/env": "12.3.4", "@next/swc-android-arm-eabi": "12.3.4", "@next/swc-android-arm64": "12.3.4", "@next/swc-darwin-arm64": "12.3.4", "@next/swc-darwin-x64": "12.3.4", "@next/swc-freebsd-x64": "12.3.4", "@next/swc-linux-arm-gnueabihf": "12.3.4", "@next/swc-linux-arm64-gnu": "12.3.4", "@next/swc-linux-arm64-musl": "12.3.4", "@next/swc-linux-x64-gnu": "12.3.4", "@next/swc-linux-x64-musl": "12.3.4", "@next/swc-win32-arm64-msvc": "12.3.4", "@next/swc-win32-ia32-msvc": "12.3.4", "@next/swc-win32-x64-msvc": "12.3.4", "@swc/helpers": "0.4.11", "caniuse-lite": "^1.0.30001406", "postcss": "8.4.14", "styled-jsx": "5.0.7", "use-sync-external-store": "1.2.0" }, "peerDependencies": { "fibers": ">= 3.1.0", "node-sass": "^6.0.0 || ^7.0.0", "react": "^17.0.2 || ^18.0.0-0", "react-dom": "^17.0.2 || ^18.0.0-0", "sass": "^1.3.0" }, "optionalPeers": ["fibers", "node-sass", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-VcyMJUtLZBGzLKo3oMxrEF0stxh8HwuW976pAzlHhI3t8qJ4SROjCrSh1T24bhrbjw55wfZXAbXPGwPt5FLRfQ=="],
"next": ["next@12.3.4", "", { "dependencies": { "@next/env": "12.3.4", "@next/swc-android-arm-eabi": "12.3.4", "@next/swc-android-arm64": "12.3.4", "@next/swc-darwin-arm64": "12.3.4", "@next/swc-darwin-x64": "12.3.4", "@next/swc-freebsd-x64": "12.3.4", "@next/swc-linux-arm-gnueabihf": "12.3.4", "@next/swc-linux-arm64-gnu": "12.3.4", "@next/swc-linux-arm64-musl": "12.3.4", "@next/swc-linux-x64-gnu": "12.3.4", "@next/swc-linux-x64-musl": "12.3.4", "@next/swc-win32-arm64-msvc": "12.3.4", "@next/swc-win32-ia32-msvc": "12.3.4", "@next/swc-win32-x64-msvc": "12.3.4", "@swc/helpers": "0.4.11", "caniuse-lite": "^1.0.30001406", "postcss": "8.4.14", "styled-jsx": "5.0.7", "use-sync-external-store": "1.2.0" }, "peerDependencies": { "fibers": ">= 3.1.0", "node-sass": "^6.0.0 || ^7.0.0", "react": "^17.0.2 || ^18.0.0-0", "react-dom": "^17.0.2 || ^18.0.0-0", "sass": "^1.3.0" }, "optionalPeers": ["node-sass", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-VcyMJUtLZBGzLKo3oMxrEF0stxh8HwuW976pAzlHhI3t8qJ4SROjCrSh1T24bhrbjw55wfZXAbXPGwPt5FLRfQ=="],
"nice-try": ["nice-try@1.0.5", "", {}, "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ=="],
@@ -1649,7 +1649,7 @@
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
"ws": ["ws@7.4.6", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A=="],
"ws": ["ws@7.4.6", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "optionalPeers": ["utf-8-validate"] }, "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A=="],
"xml-name-validator": ["xml-name-validator@3.0.0", "", {}, "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw=="],
@@ -1701,7 +1701,7 @@
"@parcel/packager-js/globals": ["globals@13.20.0", "", { "dependencies": { "type-fest": "^0.20.2" } }, "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ=="],
"@parcel/reporter-dev-server/ws": ["ws@7.5.9", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q=="],
"@parcel/reporter-dev-server/ws": ["ws@7.5.9", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "optionalPeers": ["utf-8-validate"] }, "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q=="],
"@parcel/transformer-babel/semver": ["semver@5.7.1", "", { "bin": { "semver": "./bin/semver" } }, "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="],

View File

@@ -23,7 +23,7 @@ $ hyperfine --prepare 'rm -rf node_modules' --runs 1 'bun install' 'pnpm install
To run the benchmark with offline mode but without lockfiles:
```sh
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 'rm bun.lock && bun install' 'rm pnpm-lock.yaml && pnpm install --prefer-offline' 'rm yarn.lock && yarn --offline' 'rm package-lock.json && npm install --prefer-offline'
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 'rm bun.lockb && bun install' 'rm pnpm-lock.yaml && pnpm install --prefer-offline' 'rm yarn.lock && yarn --offline' 'rm package-lock.json && npm install --prefer-offline'
```
##

View File

@@ -12,7 +12,6 @@
"eventemitter3": "^5.0.0",
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fastify": "^5.0.0",
"fdir": "^6.1.0",
"mitata": "^1.0.25",
"react": "^18.3.1",

View File

@@ -1,175 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
_.log
npm-debug.log_
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# Runtime data
pids
_.pid
_.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

View File

@@ -1,27 +0,0 @@
# Postgres table load benchmark
To install dependencies:
```bash
bun install
```
To run in Bun:
```bash
bun ./index.mjs
```
To run in Node.js:
```bash
node index.mjs
```
To run in Deno:
```bash
deno run -A index.mjs
```
You will need a localhost Postgres server running.

Binary file not shown.

View File

@@ -1,47 +0,0 @@
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
import postgres from "postgres";
const sql = isBun ? Bun.sql : postgres;
// Create the table if it doesn't exist
await sql`
CREATE TABLE IF NOT EXISTS "users_bun_bench" (
id SERIAL PRIMARY KEY,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,
email TEXT NOT NULL UNIQUE,
dob TEXT NOT NULL
)
`;
// Check if users already exist
const existingUsers = await sql`SELECT COUNT(*) as count FROM "users_bun_bench"`;
if (+(existingUsers?.[0]?.count ?? existingUsers?.count) < 100) {
// Generate 100 users if none exist
const users = Array.from({ length: 100 }, (_, i) => ({
first_name: `FirstName${i}`,
last_name: `LastName${i}`,
email: `user${i}@example.com`,
dob: new Date(1970 + Math.floor(Math.random() * 30), Math.floor(Math.random() * 12), Math.floor(Math.random() * 28))
.toISOString()
.split("T")[0],
}));
// Insert all users
await sql`
INSERT INTO users_bun_bench (first_name, last_name, email, dob) ${sql(users)}
`;
}
const type = isBun ? "Bun.sql" : "postgres";
console.time(type);
let promises = [];
for (let i = 0; i < 100_000; i++) {
promises.push(sql`SELECT * FROM "users_bun_bench" LIMIT 100`);
if (i % 100 === 0 && promises.length > 1) {
await Promise.all(promises);
promises.length = 0;
}
}
await Promise.all(promises);
console.timeEnd(type);

View File

@@ -1,14 +0,0 @@
{
"name": "postgres",
"module": "index.ts",
"type": "module",
"devDependencies": {
"@types/bun": "latest"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"postgres": "^3.4.5"
}
}

View File

@@ -1,27 +0,0 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "react-hello-world",

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "scan",

View File

@@ -25,10 +25,6 @@ bench("Buffer.from('short string')", () => {
return Buffer.from("short string");
});
bench("new Buffer('short string')", () => {
return new Buffer("short string");
});
const loooong = "long string".repeat(9999).split("").join(" ");
bench("Buffer.byteLength('long string'.repeat(9999))", () => {
return Buffer.byteLength(loooong);
@@ -49,14 +45,6 @@ bench("Buffer.from(Uint8Array(0))", () => {
return Buffer.from(empty);
});
bench("new Buffer(ArrayBuffer(100))", () => {
return new Buffer(hundred);
});
bench("new Buffer(Uint8Array(100))", () => {
return new Buffer(hundredArray);
});
bench("new Buffer(Uint8Array(0))", () => {
return new Buffer(empty);
});

View File

@@ -1,17 +0,0 @@
const buf = Buffer.from(
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
);
const INTERVAL = 9_999_999;
const time = (name, fn) => {
for (let i = 0; i < INTERVAL; i++) fn();
console.time(name.padEnd(30));
for (let i = 0; i < INTERVAL; i++) fn();
console.timeEnd(name.padEnd(30));
};
console.log(`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`, "\n");
time("includes true", () => buf.includes("nisi"));
time("includes false", () => buf.includes("oopwo"));

View File

@@ -1,7 +1,7 @@
// import { Buffer } from "buffer";
var buf = new Buffer(1024);
var view = new DataView(buf.buffer);
var INTERVAL = 9_999_999;
var INTERVAL = 9999999;
var time = (name, fn) => {
for (let i = 0; i < INTERVAL; i++) fn();

View File

@@ -1,71 +0,0 @@
import { bench, run } from "../runner.mjs";
let decodeURIComponentSIMD;
if (typeof Bun !== "undefined") {
({ decodeURIComponentSIMD } = await import("bun:internal-for-testing"));
}
const hugeText = Buffer.alloc(1000000, "Hello, world!").toString();
const hugeTextWithPercentAtEnd = Buffer.alloc(1000000, "Hello, world!%40").toString();
const tinyText = Buffer.alloc(100, "Hello, world!").toString();
const tinyTextWithPercentAtEnd = Buffer.alloc(100, "Hello, world!%40").toString();
const veryTinyText = Buffer.alloc(8, "a").toString();
const veryTinyTextWithPercentAtEnd = Buffer.alloc(8, "a%40").toString();
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - no % x 8 bytes", () => {
decodeURIComponentSIMD(veryTinyText);
});
bench(" decodeURIComponent - no % x 8 bytes", () => {
decodeURIComponent(veryTinyText);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - yes % x 8 bytes", () => {
decodeURIComponentSIMD(veryTinyTextWithPercentAtEnd);
});
bench(" decodeURIComponent - yes % x 8 bytes", () => {
decodeURIComponent(veryTinyTextWithPercentAtEnd);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - no % x 100 bytes", () => {
decodeURIComponentSIMD(tinyText);
});
bench(" decodeURIComponent - no % x 100 bytes", () => {
decodeURIComponent(tinyText);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - yes % x 100 bytes", () => {
decodeURIComponentSIMD(tinyTextWithPercentAtEnd);
});
bench(" decodeURIComponent - yes % x 100 bytes", () => {
decodeURIComponent(tinyTextWithPercentAtEnd);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - no % x 1 MB", () => {
decodeURIComponentSIMD(hugeText);
});
bench(" decodeURIComponent - no % x 1 MB", () => {
decodeURIComponent(hugeText);
});
decodeURIComponentSIMD &&
bench("decodeURIComponentSIMD - yes % x 1 MB", () => {
decodeURIComponentSIMD(hugeTextWithPercentAtEnd);
});
bench(" decodeURIComponent - yes % x 1 MB", () => {
decodeURIComponent(hugeTextWithPercentAtEnd);
});
await run();

View File

@@ -1,13 +0,0 @@
import express from "express";
const app = express();
const port = 3000;
var i = 0;
app.get("/", (req, res) => {
res.send("Hello World!" + i++);
});
app.listen(port, () => {
console.log(`Express app listening at http://localhost:${port}`);
});

View File

@@ -1,20 +0,0 @@
import Fastify from "fastify";
const fastify = Fastify({
logger: false,
});
fastify.get("/", async (request, reply) => {
return { hello: "world" };
});
const start = async () => {
try {
await fastify.listen({ port: 3000 });
} catch (err) {
fastify.log.error(err);
process.exit(1);
}
};
start();

View File

@@ -1,25 +0,0 @@
import { posix } from "path";
import { bench, run } from "../runner.mjs";
const pathConfigurations = [
["", ""],
[".", "."],
["/foo/bar", "/foo/bar"],
["/foo/bar/baz", "/foo/bar"],
["/foo/bar", "/foo/bar/baz"],
["/foo/bar/baz", "/foo/bar/qux"],
["/foo/bar/baz", "/foo/bar/baz/qux"],
["/foo/bar/baz", "/foo/bar/baz/qux/quux"],
["/", "/foo"],
["/foo", "/"],
["foo/bar/baz", "foo/bar/qux"],
["../foo/bar", "../foo/baz"],
];
pathConfigurations.forEach(([from, to]) => {
bench(`relative(${JSON.stringify(from)}, ${JSON.stringify(to)})`, () => {
globalThis.abc = posix.relative(from, to);
});
});
await run();

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -1,4 +1,4 @@
import { Database } from "https://deno.land/x/sqlite3@0.12.0/mod.ts";
import { Database } from "https://deno.land/x/sqlite3@0.11.1/mod.ts";
import { bench, run } from "../runner.mjs";
const db = new Database("./src/northwind.sqlite");

bench/sqlite/deno.lock (generated)
View File

@@ -1,82 +0,0 @@
{
"version": "4",
"specifiers": {
"jsr:@denosaurs/plug@1": "1.0.6",
"jsr:@std/assert@0.217": "0.217.0",
"jsr:@std/assert@0.221": "0.221.0",
"jsr:@std/encoding@0.221": "0.221.0",
"jsr:@std/fmt@0.221": "0.221.0",
"jsr:@std/fs@0.221": "0.221.0",
"jsr:@std/path@0.217": "0.217.0",
"jsr:@std/path@0.221": "0.221.0"
},
"jsr": {
"@denosaurs/plug@1.0.6": {
"integrity": "6cf5b9daba7799837b9ffbe89f3450510f588fafef8115ddab1ff0be9cb7c1a7",
"dependencies": [
"jsr:@std/encoding",
"jsr:@std/fmt",
"jsr:@std/fs",
"jsr:@std/path@0.221"
]
},
"@std/assert@0.217.0": {
"integrity": "c98e279362ca6982d5285c3b89517b757c1e3477ee9f14eb2fdf80a45aaa9642"
},
"@std/assert@0.221.0": {
"integrity": "a5f1aa6e7909dbea271754fd4ab3f4e687aeff4873b4cef9a320af813adb489a"
},
"@std/encoding@0.221.0": {
"integrity": "d1dd76ef0dc5d14088411e6dc1dede53bf8308c95d1537df1214c97137208e45"
},
"@std/fmt@0.221.0": {
"integrity": "379fed69bdd9731110f26b9085aeb740606b20428ce6af31ef6bd45ef8efa62a"
},
"@std/fs@0.221.0": {
"integrity": "028044450299de8ed5a716ade4e6d524399f035513b85913794f4e81f07da286",
"dependencies": [
"jsr:@std/assert@0.221",
"jsr:@std/path@0.221"
]
},
"@std/path@0.217.0": {
"integrity": "1217cc25534bca9a2f672d7fe7c6f356e4027df400c0e85c0ef3e4343bc67d11",
"dependencies": [
"jsr:@std/assert@0.217"
]
},
"@std/path@0.221.0": {
"integrity": "0a36f6b17314ef653a3a1649740cc8db51b25a133ecfe838f20b79a56ebe0095",
"dependencies": [
"jsr:@std/assert@0.221"
]
}
},
"remote": {
"https://deno.land/x/sqlite3@0.11.1/deno.json": "77126f50d0efce1375173fae94d4df7f732cd25f05d8aa74f8ff801ef4d85caf",
"https://deno.land/x/sqlite3@0.11.1/deps.ts": "d2f23a4489d27ed7ba1f601b86a85ff488a87603e4be7a15f3ea15154fc288ec",
"https://deno.land/x/sqlite3@0.11.1/mod.ts": "3169f246c0eddd6ed82862758f4109f167b7ba5538236240fbb26a129f1bc16c",
"https://deno.land/x/sqlite3@0.11.1/src/blob.ts": "3681353b3c97bc43f9b02f8d1c3269c0dc4eb9cb5d3af16c7ce4d1e1ec7507c4",
"https://deno.land/x/sqlite3@0.11.1/src/constants.ts": "85fd27aa6e199093f25f5f437052e16fd0e0870b96ca9b24a98e04ddc8b7d006",
"https://deno.land/x/sqlite3@0.11.1/src/database.ts": "063281b9b4340c781ba611cb5fef7ab0fc885cb87ed4c8ec123fd772e0da5f8b",
"https://deno.land/x/sqlite3@0.11.1/src/ffi.ts": "6648dc15f10312df9d2fc8e6e2be230d82a552f28b8f77d03f32bbfba9198888",
"https://deno.land/x/sqlite3@0.11.1/src/statement.ts": "5fe86e1a0136a259c055a03988e74490d9d131c058b4c1a18385a6770cd47e2a",
"https://deno.land/x/sqlite3@0.11.1/src/util.ts": "c6604183d2ec5fb17fa0a018572ed5f2317b319dbd7bf48d88a5d06ff25b2cc3",
"https://deno.land/x/sqlite3@0.12.0/deno.json": "b03d6de05f953886662ea987212539af8456a91352684c84af2188520449d42a",
"https://deno.land/x/sqlite3@0.12.0/deps.ts": "d2f23a4489d27ed7ba1f601b86a85ff488a87603e4be7a15f3ea15154fc288ec",
"https://deno.land/x/sqlite3@0.12.0/mod.ts": "3169f246c0eddd6ed82862758f4109f167b7ba5538236240fbb26a129f1bc16c",
"https://deno.land/x/sqlite3@0.12.0/src/blob.ts": "330886fae9714e4a612786f44d8117d65f91e778cf3f40de59b34879fc7ca9ab",
"https://deno.land/x/sqlite3@0.12.0/src/constants.ts": "85fd27aa6e199093f25f5f437052e16fd0e0870b96ca9b24a98e04ddc8b7d006",
"https://deno.land/x/sqlite3@0.12.0/src/database.ts": "4d380d7f0e5a2cf74635a9fcd2b4e27373533f2816cde5357067e51fd22ad8d0",
"https://deno.land/x/sqlite3@0.12.0/src/ffi.ts": "795b598eeae4d12f182e7bcdab524b74b0f01d6deae7f4d8ce63f25c06a46154",
"https://deno.land/x/sqlite3@0.12.0/src/statement.ts": "e8ccde898aef47c7a2514953aca5359a44a285bc3dc0de5819d66f891f477be1",
"https://deno.land/x/sqlite3@0.12.0/src/util.ts": "c6604183d2ec5fb17fa0a018572ed5f2317b319dbd7bf48d88a5d06ff25b2cc3"
},
"workspace": {
"packageJson": {
"dependencies": [
"npm:better-sqlite3@8.5.0"
]
}
}
}

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "websocket-server",

build.zig
View File

@@ -19,17 +19,16 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
const pathRel = fs.path.relative;
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
const recommended_zig_version = "0.14.0";
const recommended_zig_version = "0.13.0";
comptime {
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
@compileError(
"" ++
"Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
"CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
"upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
"comment this error out.",
"Bun requires Zig version " ++ recommended_zig_version ++ ". This is" ++
"automatically configured via Bun's CMake setup. You likely meant to run" ++
"`bun setup`. If you are trying to upgrade the Zig compiler," ++
"run `./scripts/download-zig.sh master` or comment this message out.",
);
}
}
@@ -47,7 +46,6 @@ const BunBuildOptions = struct {
sha: []const u8,
/// enable debug logs in release builds
enable_logs: bool = false,
enable_asan: bool,
tracy_callstack_depth: u16,
reported_nodejs_version: Version,
/// To make iterating on some '@embedFile's faster, we load them at runtime
@@ -83,7 +81,10 @@ const BunBuildOptions = struct {
var opts = b.addOptions();
opts.addOption([]const u8, "base_path", b.pathFromRoot("."));
opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{ b.build_root.path.?, this.codegen_path }) catch @panic("OOM"));
opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{
b.build_root.path.?,
this.codegen_path,
}) catch @panic("OOM"));
opts.addOption(bool, "codegen_embed", this.shouldEmbedCode());
opts.addOption(u32, "canary_revision", this.canary_revision orelse 0);
@@ -151,7 +152,8 @@ pub fn getCpuModel(os: OperatingSystem, arch: Arch) ?Target.Query.CpuModel {
pub fn build(b: *Build) !void {
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
checked_file_exists = std.AutoHashMap(u64, void).init(b.allocator);
b.zig_lib_dir = b.zig_lib_dir orelse b.path("vendor/zig/lib");
// TODO: Upgrade path for 0.14.0
// b.graph.zig_lib_directory = brk: {
@@ -206,7 +208,7 @@ pub fn build(b: *Build) !void {
const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0";
b.reference_trace = ref_trace: {
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 24;
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 16;
break :ref_trace if (trace == 0) null else trace;
};
@@ -274,7 +276,6 @@ pub fn build(b: *Build) !void {
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
.enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
};
// zig build obj
@@ -319,21 +320,7 @@ pub fn build(b: *Build) !void {
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}, &.{ .Debug, .ReleaseFast });
}
// zig build check-all-debug
{
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}, &.{.Debug});
});
}
// zig build check-windows
@@ -341,27 +328,13 @@ pub fn build(b: *Build) !void {
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-macos", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-linux", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
});
}
// zig build translate-c-headers
{
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
step.dependOn(&b.addInstallFile(getTranslateC(b, b.graph.host, .Debug).getOutput(), "translated-c-headers.zig").step);
step.dependOn(&b.addInstallFile(getTranslateC(b, b.host, .Debug).getOutput(), "translated-c-headers.zig").step);
}
// zig build enum-extractor
@@ -383,14 +356,13 @@ pub fn addMultiCheck(
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false },
optimize: []const std.builtin.OptimizeMode,
) void {
for (to_check) |check| {
for (optimize) |mode| {
for ([_]std.builtin.Mode{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_arch_os,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = if (check.musl) null else getOSGlibCVersion(check.os),
});
@@ -408,7 +380,6 @@ pub fn addMultiCheck(
.reported_nodejs_version = root_build_options.reported_nodejs_version,
.codegen_path = root_build_options.codegen_path,
.no_llvm = root_build_options.no_llvm,
.enable_asan = root_build_options.enable_asan,
};
var obj = addBunObject(b, &options);
@@ -442,7 +413,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
.root_source_file = switch (opts.os) {
.wasm => b.path("root_wasm.zig"),
else => b.path("src/main.zig"),
else => b.path("root.zig"),
// else => b.path("root_css.zig"),
},
.target = opts.target,
@@ -456,15 +427,8 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
.omit_frame_pointer = false,
.strip = false, // stripped at the end
});
if (opts.enable_asan) {
if (@hasField(Build.Module, "sanitize_address")) {
obj.root_module.sanitize_address = true;
} else {
const fail_step = b.addFail("asan is not supported on this platform");
obj.step.dependOn(&fail_step.step);
}
}
obj.bundle_compiler_rt = false;
obj.formatted_panics = true;
obj.root_module.omit_frame_pointer = false;
// Link libc
@@ -517,21 +481,25 @@ pub fn addInstallObjectFile(
}, b.fmt("{s}.o", .{name})).step;
}
var checked_file_exists: std.AutoHashMap(u64, void) = undefined;
fn exists(path: []const u8) bool {
const entry = checked_file_exists.getOrPut(std.hash.Wyhash.hash(0, path)) catch unreachable;
if (entry.found_existing) {
// It would've panicked.
return true;
}
std.fs.accessAbsolute(path, .{ .mode = .read_only }) catch return false;
const file = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
file.close();
return true;
}
fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
const os = opts.os;
const io_path = switch (os) {
.mac => "src/io/io_darwin.zig",
.linux => "src/io/io_linux.zig",
.windows => "src/io/io_windows.zig",
else => "src/io/io_stub.zig",
};
obj.root_module.addAnonymousImport("async_io", .{
.root_source_file = b.path(io_path),
});
const zlib_internal_path = switch (os) {
.windows => "src/deps/zlib.win32.zig",
.linux, .mac => "src/deps/zlib.posix.zig",
@@ -564,7 +532,6 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
.{ .file = "bun-error/index.js", .enable = opts.shouldEmbedCode() },
.{ .file = "bun-error/bun-error.css", .enable = opts.shouldEmbedCode() },
.{ .file = "fallback-decoder.js", .enable = opts.shouldEmbedCode() },
.{ .file = "node-fallbacks/react-refresh.js", .enable = opts.shouldEmbedCode() },
.{ .file = "node-fallbacks/assert.js", .enable = opts.shouldEmbedCode() },
.{ .file = "node-fallbacks/buffer.js", .enable = opts.shouldEmbedCode() },
.{ .file = "node-fallbacks/console.js", .enable = opts.shouldEmbedCode() },
@@ -601,15 +568,6 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
});
}
}
inline for (.{
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
.{ .import = "completions-fish", .file = b.path("completions/bun.fish") },
}) |entry| {
obj.root_module.addAnonymousImport(entry.import, .{
.root_source_file = entry.file,
});
}
if (os == .windows) {
obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{
@@ -649,7 +607,7 @@ const WindowsShim = struct {
.optimize = .ReleaseFast,
.use_llvm = true,
.use_lld = true,
.unwind_tables = .none,
.unwind_tables = false,
.omit_frame_pointer = true,
.strip = true,
.linkage = .static,

View File

@@ -1,11 +1,11 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bun",
"devDependencies": {
"@mdn/browser-compat-data": "~5.5.28",
"@types/bun": "*",
"@types/bun": "^1.1.3",
"@types/react": "^18.3.3",
"@typescript-eslint/eslint-plugin": "^7.11.0",
"@typescript-eslint/parser": "^7.11.0",
@@ -28,7 +28,7 @@
"packages/bun-types": {
"name": "bun-types",
"dependencies": {
"@types/node": "*",
"@types/node": "~20.12.8",
"@types/ws": "~8.5.10",
},
"devDependencies": {
@@ -151,13 +151,13 @@
"@qiwi/npm-registry-client": ["@qiwi/npm-registry-client@8.9.1", "", { "dependencies": { "concat-stream": "^2.0.0", "graceful-fs": "^4.2.4", "normalize-package-data": "~1.0.1 || ^2.0.0 || ^3.0.0", "npm-package-arg": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^8.0.0", "once": "^1.4.0", "request": "^2.88.2", "retry": "^0.12.0", "safe-buffer": "^5.2.1", "semver": "2 >=2.2.1 || 3.x || 4 || 5 || 7", "slide": "^1.1.6", "ssri": "^8.0.0" }, "optionalDependencies": { "npmlog": "2 || ^3.1.0 || ^4.0.0" } }, "sha512-rZF+mG+NfijR0SHphhTLHRr4aM4gtfdwoAMY6we2VGQam8vkN1cxGG1Lg/Llrj8Dd0Mu6VjdFQRyMMRZxtZR2A=="],
"@types/bun": ["@types/bun@1.2.2", "", { "dependencies": { "bun-types": "1.2.2" } }, "sha512-tr74gdku+AEDN5ergNiBnplr7hpDp3V1h7fqI2GcR/rsUaM39jpSeKH0TFibRvU0KwniRx5POgaYnaXbk0hU+w=="],
"@types/bun": ["@types/bun@1.1.6", "", { "dependencies": { "bun-types": "1.1.17" } }, "sha512-uJgKjTdX0GkWEHZzQzFsJkWp5+43ZS7HC8sZPFnOwnSo1AsNl2q9o2bFeS23disNDqbggEgyFkKCHl/w8iZsMA=="],
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
"@types/json5": ["@types/json5@0.0.29", "", {}, "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ=="],
"@types/node": ["@types/node@22.13.5", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg=="],
"@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
"@types/prop-types": ["@types/prop-types@15.7.12", "", {}, "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q=="],
@@ -257,7 +257,7 @@
"builtins": ["builtins@1.0.3", "", {}, "sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ=="],
"bun-types": ["bun-types@workspace:packages/bun-types"],
"bun-types": ["bun-types@workspace:packages/bun-types", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" }, "devDependencies": { "@biomejs/biome": "^1.5.3", "@definitelytyped/dtslint": "^0.0.199", "@definitelytyped/eslint-plugin": "^0.0.197", "typescript": "^5.0.2" } }],
"call-bind": ["call-bind@1.0.7", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.1" } }, "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w=="],
@@ -265,7 +265,7 @@
"camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
"caniuse-lite": ["caniuse-lite@1.0.30001695", "", {}, "sha512-vHyLade6wTgI2u1ec3WQBxv+2BrTERV28UXQu9LO6lZ9pYeMk34vjXFLOxo1A4UBA8XTL4njRQZdno/yYaSmWw=="],
"caniuse-lite": ["caniuse-lite@1.0.30001653", "", {}, "sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw=="],
"capital-case": ["capital-case@1.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A=="],
@@ -845,7 +845,7 @@
"unbox-primitive": ["unbox-primitive@1.0.2", "", { "dependencies": { "call-bind": "^1.0.2", "has-bigints": "^1.0.2", "has-symbols": "^1.0.3", "which-boxed-primitive": "^1.0.2" } }, "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw=="],
"undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"update-browserslist-db": ["update-browserslist-db@1.1.0", "", { "dependencies": { "escalade": "^3.1.2", "picocolors": "^1.0.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ=="],
@@ -915,21 +915,17 @@
"@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
"@types/ws/@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
"@typescript-eslint/visitor-keys/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
"are-we-there-yet/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
"autoprefixer/caniuse-lite": ["caniuse-lite@1.0.30001653", "", {}, "sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw=="],
"babel-code-frame/chalk": ["chalk@1.1.3", "", { "dependencies": { "ansi-styles": "^2.2.1", "escape-string-regexp": "^1.0.2", "has-ansi": "^2.0.0", "strip-ansi": "^3.0.0", "supports-color": "^2.0.0" } }, "sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A=="],
"babel-code-frame/js-tokens": ["js-tokens@3.0.2", "", {}, "sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg=="],
"browserslist/caniuse-lite": ["caniuse-lite@1.0.30001653", "", {}, "sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw=="],
"bun-types/typescript": ["typescript@5.5.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ=="],
"eslint-import-resolver-node/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
@@ -1007,8 +1003,6 @@
"@definitelytyped/utils/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="],
"@types/ws/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="],
"are-we-there-yet/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="],

View File

@@ -2,7 +2,3 @@
# https://github.com/oven-sh/bun/issues/16289
[test]
preload = ["./test/js/node/harness.ts", "./test/preload.ts"]
[install]
# Node.js never auto-installs modules.
auto = "disable"

84
ci/README.md Normal file
View File

@@ -0,0 +1,84 @@
# CI
This directory contains scripts for building CI images for Bun.
## Building
### `macOS`
On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs to run macOS VMs.
To install the required dependencies, run:
```sh
$ cd ci
$ bun run bootstrap
```
To build a vanilla macOS VM, run:
```sh
$ bun run build:darwin-aarch64-vanilla
```
This builds a vanilla macOS VM matching the macOS release currently running on your machine. It runs scripts to disable features like Spotlight and Siri, but it does not install any software.
> Note: The image size is 50GB, so make sure you have enough disk space.
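After the build completes, you can confirm the image exists and boot it manually with `tart` (the VM name below is an example; the actual name depends on the macOS release you built):
```sh
$ tart list
$ tart run bun-darwin-aarch64-vanilla-sequoia-15
```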
If you want to build a specific macOS release, you can run:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: You cannot build a newer release of macOS on an older macOS machine.
To build a macOS VM with software installed to build and test Bun, run:
```sh
$ bun run build:darwin-aarch64
```
## Running
### `macOS`
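Built images are started with `tart`. A minimal sketch, assuming you built (or pulled) one of the images above; the default `admin`/`admin` credentials come from [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl):
```sh
# Clone the image so the original stays pristine, then boot the clone.
$ tart clone bun-darwin-aarch64-sequoia-15 bun-runner
$ tart run bun-runner
# In another terminal, connect over SSH once the VM is up.
$ ssh admin@$(tart ip bun-runner)
```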
## How To
### Support a new macOS release
1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.
2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:
```hcl
sequoia = {
distro = "sequoia"
release = "15"
ipsw = "https://updates.cdn-apple.com/..."
}
```
3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.
4. Test and build the non-vanilla image:
```sh
$ bun run build:darwin-aarch64-15
```
This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.
5. Publish the images:
```sh
$ bun run login
$ bun run publish:darwin-aarch64-vanilla-15
$ bun run publish:darwin-aarch64-15
```
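To use a published image on another machine, pull it with `tart pull` (a sketch; the `-v1` suffix is a placeholder for the version computed by the `fetch:darwin-version`/`fetch:script-version` scripts):
```sh
$ tart pull ghcr.io/oven-sh/bun-vm:darwin-aarch64-sequoia-15-v1
```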

View File

@@ -0,0 +1,22 @@
FROM alpine:edge AS build
ARG GIT_SHA
ENV GIT_SHA=${GIT_SHA}
WORKDIR /app/bun
ENV HOME=/root
COPY . .
RUN touch $HOME/.bashrc
RUN ./scripts/bootstrap.sh
RUN . $HOME/.bashrc && bun run build:release
RUN apk add file
RUN file ./build/release/bun
RUN ldd ./build/release/bun
RUN ./build/release/bun
RUN cp -R /app/bun/build/* /output
FROM scratch AS artifact
COPY --from=build /output /
# docker build -f ./ci/alpine/build.Dockerfile --progress=plain --build-arg GIT_SHA="$(git rev-parse HEAD)" --target=artifact --output type=local,dest=./build-alpine .

20
ci/alpine/test.Dockerfile Normal file
View File

@@ -0,0 +1,20 @@
FROM alpine:edge
ENV HOME=/root
WORKDIR /root
COPY ./build-alpine/release/bun .
COPY ./test ./test
COPY ./scripts ./scripts
COPY ./package.json ./package.json
COPY ./packages ./packages
RUN apk update
RUN apk add nodejs lsb-release-minimal git python3 npm make g++
RUN apk add file
RUN file /root/bun
RUN ldd /root/bun
RUN /root/bun
RUN ./scripts/runner.node.mjs --exec-path /root/bun
# docker build -f ./ci/alpine/test.Dockerfile --progress=plain .

View File

@@ -0,0 +1,46 @@
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
# See login.sh and optimize.sh for details.
data "external-raw" "boot-script" {
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
}
source "tart-cli" "bun-darwin-aarch64-vanilla" {
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
from_ipsw = local.release.ipsw
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
create_grace_time = "30s"
boot_command = split("\n", data.external-raw.boot-script.result)
headless = true # Disable if you need to debug why the boot_command is not working
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
provisioner "file" {
content = file("scripts/setup-login.sh")
destination = "/tmp/setup-login.sh"
}
provisioner "shell" {
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
}
provisioner "file" {
content = file("scripts/optimize-machine.sh")
destination = "/tmp/optimize-machine.sh"
}
provisioner "shell" {
inline = ["sudo sh /tmp/optimize-machine.sh"]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}
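# This template is normally invoked through the scripts in ci/package.json, e.g.
# (illustrative): packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/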

44
ci/darwin/image.pkr.hcl Normal file
View File

@@ -0,0 +1,44 @@
# Generates a macOS VM with software installed to build and test Bun.
source "tart-cli" "bun-darwin-aarch64" {
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
headless = true
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64"]
provisioner "file" {
content = file("../../scripts/bootstrap.sh")
destination = "/tmp/bootstrap.sh"
}
provisioner "shell" {
inline = ["CI=true sh /tmp/bootstrap.sh"]
}
provisioner "file" {
source = "darwin/plists/"
destination = "/tmp/"
}
provisioner "shell" {
inline = [
"sudo ls /tmp/",
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

View File

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.buildkite.buildkite-agent</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/buildkite-agent</string>
<string>start</string>
</array>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false />
</dict>
<key>RunAtLoad</key>
<true />
<key>StandardOutPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>StandardErrorPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>EnvironmentVariables</key>
<dict>
<key>BUILDKITE_AGENT_CONFIG</key>
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
</dict>
<key>LimitLoadToSessionType</key>
<array>
<string>Aqua</string>
<string>LoginWindow</string>
<string>Background</string>
<string>StandardIO</string>
<string>System</string>
</array>
</dict>
</plist>

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscale</string>
<string>up</string>
<string>--ssh</string>
<string>--authkey</string>
<string>${TAILSCALE_AUTHKEY}</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscaled</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

124
ci/darwin/scripts/boot-image.sh Executable file
View File

@@ -0,0 +1,124 @@
#!/bin/sh
# This script generates the boot commands for the macOS installer GUI.
# It is run on your local machine, not inside the VM.
# Sources:
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
echo "Script must be run with variables: release, username, and password" >&2
exit 1
fi
# Hello, hola, bonjour, etc.
echo "<wait120s><spacebar>"
# Select Your Country and Region
echo "<wait30s>italiano<esc>english<enter>"
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
# Written and Spoken Languages
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Accessibility
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Data & Privacy
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Migration Assistant
echo "<wait30s><tab><tab><tab><spacebar>"
# Sign In with Your Apple ID
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you want to skip signing in with an Apple ID?
echo "<wait30s><tab><spacebar>"
# Terms and Conditions
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# I have read and agree to the macOS Software License Agreement
echo "<wait30s><tab><spacebar>"
# Create a Computer Account
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
# Enable Location Services
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you don't want to use Location Services?
echo "<wait30s><tab><spacebar>"
# Select Your Time Zone
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
# Analytics
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Screen Time
echo "<wait30s><tab><spacebar>"
# Siri
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
# Choose Your Look
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
# Enable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
else
# Welcome to Mac
echo "<wait30s><spacebar>"
# Enable Keyboard navigation
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
echo "<wait30s><leftAltOn>q<leftAltOff>"
fi
# Now that the installation is done, open "System Settings"
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
# Navigate to "Sharing"
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
if [ "${release}" = "13" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><down><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
# Open "Remote Login" details
echo "<wait30s><tab><spacebar>"
# Enable "Full Disk Access"
echo "<wait30s><tab><spacebar>"
# Click "Done"
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Disable Voice Over
echo "<leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "14" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
# Disable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "15" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
fi
# Quit System Settings
echo "<wait30s><leftAltOn>q<leftAltOff>"

View File

@@ -0,0 +1,122 @@
#!/bin/sh
# This script optimizes macOS for virtualized environments.
# It disables things like spotlight, screen saver, and sleep.
# Sources:
# - https://github.com/sickcodes/osx-optimizer
# - https://github.com/koding88/MacBook-Optimization-Script
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
disable_software_update() {
execute softwareupdate --schedule off
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
execute defaults write com.apple.commerce AutoUpdate -bool false
execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
}
disable_spotlight() {
execute mdutil -i off -a
execute mdutil -E /
}
disable_siri() {
execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
execute defaults write com.apple.Siri StatusMenuVisible -bool false
execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
execute defaults write com.apple.assistant.support "Assistant Enabled" 0
}
disable_sleep() {
execute systemsetup -setsleep Never
execute systemsetup -setcomputersleep Never
execute systemsetup -setdisplaysleep Never
execute systemsetup -setharddisksleep Never
}
disable_screen_saver() {
execute defaults write com.apple.screensaver loginWindowIdleTime 0
execute defaults write com.apple.screensaver idleTime 0
}
disable_screen_lock() {
execute defaults write com.apple.loginwindow DisableScreenLock -bool true
}
disable_wallpaper() {
execute defaults write com.apple.loginwindow DesktopPicture ""
}
disable_application_state() {
execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
}
disable_accessibility() {
execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
execute defaults write com.apple.universalaccess reduceMotion -int 1
execute defaults write com.apple.universalaccess reduceTransparency -int 1
}
disable_dashboard() {
execute defaults write com.apple.dashboard mcx-disabled -boolean YES
execute killall Dock
}
disable_animations() {
execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
execute defaults write -g QLPanelAnimationDuration -float 0
execute defaults write com.apple.finder DisableAllAnimations -bool true
}
disable_time_machine() {
execute tmutil disable
}
enable_performance_mode() {
# https://support.apple.com/en-us/101992
if ! nvram boot-args 2>/dev/null | grep -q serverperfmode; then
execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
fi
}
add_terminal_to_desktop() {
execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
}
main() {
disable_software_update
disable_spotlight
disable_siri
disable_sleep
disable_screen_saver
disable_screen_lock
disable_wallpaper
disable_application_state
disable_accessibility
disable_dashboard
disable_animations
disable_time_machine
enable_performance_mode
add_terminal_to_desktop
}
main

View File

@@ -0,0 +1,78 @@
#!/bin/sh
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
# Sources:
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
kcpassword() {
passwd="$1"
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
key_len=33
passwd_len=${#passwd_hex}
remainder=$((passwd_len % key_len))
if [ $remainder -ne 0 ]; then
padding=$((key_len - remainder))
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
fi
result=""
i=0
while [ $i -lt ${#passwd_hex} ]; do
for byte in $key; do
[ $i -ge ${#passwd_hex} ] && break
p="${passwd_hex:$i:2}"
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
result="${result}${r}"
i=$((i + 2))
done
done
echo "$result"
}
login() {
username="$1"
password="$2"
enable_passwordless_sudo() {
execute mkdir -p /etc/sudoers.d/
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
}
enable_auto_login() {
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
}
disable_screen_lock() {
execute sysadminctl -screenLock off -password "${password}"
}
enable_passwordless_sudo
enable_auto_login
disable_screen_lock
}
if [ $# -ne 2 ]; then
echo "Usage: $0 <username> <password>" >&2
exit 1
fi
login "$@"

View File

@@ -0,0 +1,78 @@
packer {
required_plugins {
tart = {
version = ">= 1.12.0"
source = "github.com/cirruslabs/tart"
}
external = {
version = ">= 0.0.2"
source = "github.com/joomcode/external"
}
}
}
variable "release" {
type = number
default = 13
}
variable "username" {
type = string
default = "admin"
}
variable "password" {
type = string
default = "admin"
}
variable "cpu_count" {
type = number
default = 2
}
variable "memory_gb" {
type = number
default = 4
}
variable "disk_size_gb" {
type = number
default = 50
}
locals {
sequoia = {
tier = 1
distro = "sequoia"
release = "15"
ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
}
sonoma = {
tier = 2
distro = "sonoma"
release = "14"
ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
}
ventura = {
tier = 2
distro = "ventura"
release = "13"
ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
}
releases = {
15 = local.sequoia
14 = local.sonoma
13 = local.ventura
}
release = local.releases[var.release]
username = var.username
password = var.password
cpu_count = var.cpu_count
memory_gb = var.memory_gb
disk_size_gb = var.disk_size_gb
}

18
ci/linux/Dockerfile Normal file
View File

@@ -0,0 +1,18 @@
ARG IMAGE=debian:11
FROM $IMAGE
COPY ./scripts/bootstrap.sh /tmp/bootstrap.sh
ENV CI=true
RUN sh /tmp/bootstrap.sh && rm -rf /tmp/*
WORKDIR /workspace/bun
COPY bunfig.toml bunfig.toml
COPY package.json package.json
COPY CMakeLists.txt CMakeLists.txt
COPY cmake/ cmake/
COPY scripts/ scripts/
COPY patches/ patches/
COPY *.zig ./
COPY src/ src/
COPY packages/ packages/
COPY test/ test/
RUN bun i
RUN bun run build:ci
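# Example invocation (the image tag is illustrative):
# docker build -f ./ci/linux/Dockerfile --progress=plain --build-arg IMAGE=ubuntu:22.04 -t bun-linux-ci .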

View File

@@ -0,0 +1,27 @@
#!/bin/sh
# This script sets the hostname of the current machine.
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
main() {
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <hostname>" >&2
exit 1
fi
if [ -f "$(which hostnamectl)" ]; then
execute hostnamectl set-hostname "$1"
else
echo "Error: hostnamectl is not installed." >&2
exit 1
fi
}
main "$@"

View File

@@ -0,0 +1,22 @@
#!/bin/sh
# This script starts tailscale on the current machine.
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
main() {
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <auth-key>" >&2
exit 1
fi
execute tailscale up --reset --ssh --accept-risk=lose-ssh --auth-key="$1"
}
main "$@"

27
ci/package.json Normal file
View File

@@ -0,0 +1,27 @@
{
"private": true,
"scripts": {
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
"login": "token=$(gh auth token); username=$(gh api user --jq .login); echo \"Login as $username...\"; echo \"$token\" | tart login ghcr.io --username \"$username\" --password-stdin; echo \"$token\" | docker login ghcr.io --username \"$username\" --password-stdin",
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
"fetch:darwin-version": "echo 1",
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
}
}

View File

@@ -44,13 +44,6 @@ if(WIN32)
)
endif()
if(ENABLE_ASAN)
register_compiler_flags(
DESCRIPTION "Enable AddressSanitizer"
-fsanitize=address
)
endif()
# --- Optimization level ---
if(DEBUG)
register_compiler_flags(

View File

@@ -419,15 +419,7 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact})
if(BUILDKITE)
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
if(filename STREQUAL "libbun-profile.a")
# libbun-profile.a is now over 5 GB in size, so compress it before uploading
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -6 libbun-profile.a)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
else()
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
endif()
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
endif()
endforeach()

View File

@@ -86,11 +86,6 @@ optionx(ENABLE_LTO BOOL "If LTO (link-time optimization) should be used" DEFAULT
if(LINUX)
optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF)
endif()
if(DEBUG AND APPLE AND CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ON)
else()
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT OFF)
endif()
optionx(ENABLE_PRETTIER BOOL "If prettier should be ran" DEFAULT OFF)

View File

@@ -1,16 +1,16 @@
get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME)
message(STATUS "Running script: ${SCRIPT_NAME}")
if(NOT ZIG_PATH OR NOT ZIG_COMMIT)
message(FATAL_ERROR "ZIG_PATH and ZIG_COMMIT required")
if(NOT ZIG_PATH OR NOT ZIG_COMMIT OR NOT ZIG_VERSION)
message(FATAL_ERROR "ZIG_PATH, ZIG_COMMIT, and ZIG_VERSION are required")
endif()
if(CMAKE_HOST_APPLE)
set(ZIG_OS_ABI "macos-none")
set(ZIG_OS "macos")
elseif(CMAKE_HOST_WIN32)
set(ZIG_OS_ABI "windows-gnu")
set(ZIG_OS "windows")
elseif(CMAKE_HOST_UNIX)
set(ZIG_OS_ABI "linux-musl")
set(ZIG_OS "linux")
else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_HOST_SYSTEM_NAME}")
endif()
@@ -28,16 +28,17 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_HOST_SYSTEM_PROCESSOR}")
endif()
set(ZIG_NAME bootstrap-${ZIG_ARCH}-${ZIG_OS_ABI})
set(ZIG_FILENAME ${ZIG_NAME}.zip)
set(ZIG_NAME zig-${ZIG_OS}-${ZIG_ARCH}-${ZIG_VERSION})
if(CMAKE_HOST_WIN32)
set(ZIG_EXE "zig.exe")
set(ZIG_FILENAME ${ZIG_NAME}.zip)
else()
set(ZIG_EXE "zig")
set(ZIG_FILENAME ${ZIG_NAME}.tar.xz)
endif()
set(ZIG_DOWNLOAD_URL https://github.com/oven-sh/zig/releases/download/autobuild-${ZIG_COMMIT}/${ZIG_FILENAME})
set(ZIG_DOWNLOAD_URL https://ziglang.org/download/${ZIG_VERSION}/${ZIG_FILENAME})
execute_process(
COMMAND
@@ -61,8 +62,35 @@ if(NOT EXISTS ${ZIG_PATH}/${ZIG_EXE})
endif()
# Tools like VSCode need a stable path to the zig executable on both Unix and Windows.
# To work around this, we create `zig.exe` & `zls.exe` symlinks on Unix.
# To work around this, we create a `bun.exe` symlink on Unix.
if(NOT WIN32)
file(CREATE_LINK ${ZIG_PATH}/${ZIG_EXE} ${ZIG_PATH}/zig.exe SYMBOLIC)
file(CREATE_LINK ${ZIG_PATH}/zls ${ZIG_PATH}/zls.exe SYMBOLIC)
endif()
set(ZIG_REPOSITORY_PATH ${ZIG_PATH}/repository)
execute_process(
COMMAND
${CMAKE_COMMAND}
-DGIT_PATH=${ZIG_REPOSITORY_PATH}
-DGIT_REPOSITORY=oven-sh/zig
-DGIT_COMMIT=${ZIG_COMMIT}
-P ${CMAKE_CURRENT_LIST_DIR}/GitClone.cmake
ERROR_STRIP_TRAILING_WHITESPACE
ERROR_VARIABLE
ZIG_REPOSITORY_ERROR
RESULT_VARIABLE
ZIG_REPOSITORY_RESULT
)
if(NOT ZIG_REPOSITORY_RESULT EQUAL 0)
message(FATAL_ERROR "Download failed: ${ZIG_REPOSITORY_ERROR}")
endif()
file(REMOVE_RECURSE ${ZIG_PATH}/lib)
# Use copy_directory instead of file(RENAME) because there were
# race conditions in CI where some files were not copied.
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_directory ${ZIG_REPOSITORY_PATH}/lib ${ZIG_PATH}/lib)
file(REMOVE_RECURSE ${ZIG_REPOSITORY_PATH})

View File

@@ -1,8 +1,8 @@
if(DEBUG)
set(bun bun-debug)
# elseif(ENABLE_SMOL)
# set(bun bun-smol-profile)
# set(bunStrip bun-smol)
elseif(ENABLE_SMOL)
set(bun bun-smol-profile)
set(bunStrip bun-smol)
elseif(ENABLE_VALGRIND)
set(bun bun-valgrind)
elseif(ENABLE_ASSERTIONS)
@@ -179,32 +179,6 @@ register_command(
${BUN_NODE_FALLBACKS_OUTPUTS}
)
# An embedded copy of react-refresh is used when the user forgets to install it.
# The library is not versioned alongside React.
set(BUN_REACT_REFRESH_OUTPUT ${BUN_NODE_FALLBACKS_OUTPUT}/react-refresh.js)
register_command(
TARGET
bun-node-fallbacks-react-refresh
COMMENT
"Building node-fallbacks/react-refresh.js"
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} build
${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js
--outfile=${BUN_REACT_REFRESH_OUTPUT}
--target=browser
--format=cjs
--minify
--define:process.env.NODE_ENV=\"'development'\"
SOURCES
${BUN_NODE_FALLBACKS_SOURCE}/package.json
${BUN_NODE_FALLBACKS_SOURCE}/bun.lock
${BUN_NODE_FALLBACKS_NODE_MODULES}
OUTPUTS
${BUN_REACT_REFRESH_OUTPUT}
)
set(BUN_ERROR_CODE_SCRIPT ${CWD}/src/codegen/generate-node-errors.ts)
set(BUN_ERROR_CODE_SOURCES
@@ -254,7 +228,6 @@ set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
${CODEGEN_PATH}/ZigGeneratedClasses+DOMIsoSubspaces.h
${CODEGEN_PATH}/ZigGeneratedClasses+lazyStructureImpl.h
${CODEGEN_PATH}/ZigGeneratedClasses.zig
${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
)
register_command(
@@ -430,12 +403,9 @@ set(BUN_OBJECT_LUT_SOURCES
${CWD}/src/bun.js/bindings/ZigGlobalObject.lut.txt
${CWD}/src/bun.js/bindings/JSBuffer.cpp
${CWD}/src/bun.js/bindings/BunProcess.cpp
${CWD}/src/bun.js/bindings/ProcessBindingBuffer.cpp
${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp
${CWD}/src/bun.js/bindings/ProcessBindingFs.cpp
${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp
${CWD}/src/bun.js/modules/NodeModuleModule.cpp
${CODEGEN_PATH}/ZigGeneratedClasses.lut.txt
)
set(BUN_OBJECT_LUT_OUTPUTS
@@ -443,12 +413,9 @@ set(BUN_OBJECT_LUT_OUTPUTS
${CODEGEN_PATH}/ZigGlobalObject.lut.h
${CODEGEN_PATH}/JSBuffer.lut.h
${CODEGEN_PATH}/BunProcess.lut.h
${CODEGEN_PATH}/ProcessBindingBuffer.lut.h
${CODEGEN_PATH}/ProcessBindingConstants.lut.h
${CODEGEN_PATH}/ProcessBindingFs.lut.h
${CODEGEN_PATH}/ProcessBindingNatives.lut.h
${CODEGEN_PATH}/NodeModuleModule.lut.h
${CODEGEN_PATH}/ZigGeneratedClasses.lut.h
)
macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps)
@@ -480,8 +447,6 @@ foreach(i RANGE 0 ${BUN_OBJECT_LUT_SOURCES_MAX_INDEX})
bun-codegen-lut-${filename}
COMMENT
"Generating ${filename}.lut.h"
DEPENDS
${BUN_OBJECT_LUT_SOURCE}
COMMAND
${BUN_EXECUTABLE}
run
@@ -513,8 +478,6 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
${CODEGEN_PATH}/ZigGlobalObject.lut.h
)
WEBKIT_ADD_SOURCE_DEPENDENCIES(
${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
${CODEGEN_PATH}/InternalModuleRegistryConstants.h
@@ -528,7 +491,8 @@ file(GLOB_RECURSE BUN_ZIG_SOURCES ${CONFIGURE_DEPENDS}
list(APPEND BUN_ZIG_SOURCES
${CWD}/build.zig
${CWD}/src/main.zig
${CWD}/root.zig
${CWD}/root_wasm.zig
${BUN_BINDGEN_ZIG_OUTPUTS}
)
@@ -537,7 +501,6 @@ set(BUN_ZIG_GENERATED_SOURCES
${BUN_FALLBACK_DECODER_OUTPUT}
${BUN_RUNTIME_JS_OUTPUT}
${BUN_NODE_FALLBACKS_OUTPUTS}
${BUN_REACT_REFRESH_OUTPUT}
${BUN_ERROR_CODE_OUTPUTS}
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
${BUN_JAVASCRIPT_OUTPUTS}
@@ -622,7 +585,6 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/bun.js/bindings/sqlite/*.cpp
${CWD}/src/bun.js/bindings/webcrypto/*.cpp
${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp
${CWD}/src/bun.js/bindings/node/crypto/*.cpp
${CWD}/src/bun.js/bindings/v8/*.cpp
${CWD}/src/bun.js/bindings/v8/shim/*.cpp
${CWD}/src/bake/*.cpp
@@ -738,7 +700,7 @@ endif()
# --- C/C++ Properties ---
set_target_properties(${bun} PROPERTIES
CXX_STANDARD 23
CXX_STANDARD 20
CXX_STANDARD_REQUIRED YES
CXX_EXTENSIONS YES
CXX_VISIBILITY_PRESET hidden
@@ -747,18 +709,6 @@ set_target_properties(${bun} PROPERTIES
VISIBILITY_INLINES_HIDDEN YES
)
if (NOT WIN32)
# Enable precompiled headers
# Only enable in these scenarios:
# 1. NOT in CI, OR
# 2. In CI AND BUN_CPP_ONLY is enabled
if(NOT CI OR (CI AND BUN_CPP_ONLY))
target_precompile_headers(${bun} PRIVATE
"$<$<COMPILE_LANGUAGE:CXX>:${CWD}/src/bun.js/bindings/root.h>"
)
endif()
endif()
# --- C/C++ Includes ---
if(WIN32)
@@ -772,7 +722,6 @@ target_include_directories(${bun} PRIVATE
${CWD}/src/bun.js/bindings
${CWD}/src/bun.js/bindings/webcore
${CWD}/src/bun.js/bindings/webcrypto
${CWD}/src/bun.js/bindings/node/crypto
${CWD}/src/bun.js/bindings/sqlite
${CWD}/src/bun.js/bindings/v8
${CWD}/src/bun.js/modules
@@ -871,15 +820,6 @@ if(NOT WIN32)
)
endif()
if (ENABLE_ASAN)
target_compile_options(${bun} PUBLIC
-fsanitize=address
)
target_link_libraries(${bun} PUBLIC
-fsanitize=address
)
endif()
target_compile_options(${bun} PUBLIC
-Werror=return-type
-Werror=return-stack-address
@@ -913,10 +853,6 @@ if(NOT WIN32)
-Werror
)
endif()
else()
target_compile_options(${bun} PUBLIC
-Wno-nullability-completeness
)
endif()
# --- Linker options ---
@@ -959,17 +895,28 @@ endif()
if(LINUX)
if(NOT ABI STREQUAL "musl")
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=pow
-Wl,--wrap=powf
)
# on arm64
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=pow
-Wl,--wrap=powf
)
else()
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=powf
)
endif()
endif()
if(NOT ABI STREQUAL "musl")
@@ -1098,7 +1045,6 @@ add_custom_target(dependencies DEPENDS ${BUN_TARGETS})
if(APPLE)
target_link_libraries(${bun} PRIVATE icucore resolv)
target_compile_definitions(${bun} PRIVATE U_DISABLE_RENAMING=1)
endif()
if(USE_STATIC_SQLITE)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
67f1d4ffd6b74db7e053fb129dcce620193c180d
4f8becea13a0021c8b71abd2dcc5899384973b66
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)

Some files were not shown because too many files have changed in this diff.