mirror of
https://github.com/oven-sh/bun
synced 2026-02-03 15:38:46 +00:00
Compare commits
293 Commits
zack/patch
...
dave/node_
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0e665b7dae | ||
|
|
9f63d55efc | ||
|
|
c2d1389dbc | ||
|
|
87067e528d | ||
|
|
cf9eff9d5d | ||
|
|
bec04c7341 | ||
|
|
a44b7e41d2 | ||
|
|
de5e56336c | ||
|
|
1c648063fa | ||
|
|
1c3354bc95 | ||
|
|
d5d4f53e82 | ||
|
|
7ab4dc738f | ||
|
|
ebc7045ca4 | ||
|
|
848ad19d9e | ||
|
|
1da3436266 | ||
|
|
49e496399a | ||
|
|
9b8340a5b3 | ||
|
|
8efcc61a7b | ||
|
|
4d6480050c | ||
|
|
fc2c134bc6 | ||
|
|
4c4db1da37 | ||
|
|
77e14c8482 | ||
|
|
fba5d65003 | ||
|
|
c181cf45a7 | ||
|
|
5aeb4d9f79 | ||
|
|
1d9a8b4134 | ||
|
|
30881444df | ||
|
|
a2b4e3d4c2 | ||
|
|
e5662caa33 | ||
|
|
1f1ea7bf24 | ||
|
|
175746e569 | ||
|
|
005dd776b6 | ||
|
|
81dec2657f | ||
|
|
dbd320ccfa | ||
|
|
8f8d3968a3 | ||
|
|
0bbdd880e6 | ||
|
|
51257d5668 | ||
|
|
a2ae28d158 | ||
|
|
f04991f6bb | ||
|
|
80e651aca3 | ||
|
|
a5ba02804f | ||
|
|
4199fd4515 | ||
|
|
848327d333 | ||
|
|
bfb72f84c4 | ||
|
|
e4022ec3c7 | ||
|
|
a7f34c15fc | ||
|
|
a0ebb051b0 | ||
|
|
70ca2b76c3 | ||
|
|
e5ac4f94fa | ||
|
|
d547d8a30e | ||
|
|
32d9bb3ced | ||
|
|
75df73ef90 | ||
|
|
13907c4c29 | ||
|
|
87169b6bb3 | ||
|
|
244100c32f | ||
|
|
8a78b2241d | ||
|
|
bf8b6922bb | ||
|
|
7aa05ec542 | ||
|
|
f95ae9baee | ||
|
|
f7cb2da542 | ||
|
|
d966129992 | ||
|
|
6cb5cd2a87 | ||
|
|
080a2806af | ||
|
|
92c83fcd9e | ||
|
|
277ed9d138 | ||
|
|
879cb23163 | ||
|
|
d321ee97c5 | ||
|
|
3bfeb83e7e | ||
|
|
5a18b7d2fc | ||
|
|
e75ef69fb4 | ||
|
|
78021e34ae | ||
|
|
d7187592c0 | ||
|
|
5f1b569c52 | ||
|
|
e54fe5995b | ||
|
|
a2f68989a0 | ||
|
|
4a1e01d076 | ||
|
|
dd8b0a5889 | ||
|
|
8cadf66143 | ||
|
|
77cd03dad1 | ||
|
|
82b42ed851 | ||
|
|
2de82c0b3b | ||
|
|
30df04cd35 | ||
|
|
585c8299d8 | ||
|
|
375d8da8e6 | ||
|
|
0bd8db7162 | ||
|
|
d97260869d | ||
|
|
fdb58dc861 | ||
|
|
5f118704ec | ||
|
|
610c7f5e47 | ||
|
|
1e0b20f514 | ||
|
|
f6c89f4c25 | ||
|
|
907cd8d45d | ||
|
|
ac4523e903 | ||
|
|
24574dddb2 | ||
|
|
2da57f6d7b | ||
|
|
e2c3749965 | ||
|
|
57c6a7db35 | ||
|
|
c37891471a | ||
|
|
8ba0791dc8 | ||
|
|
f9371e59f2 | ||
|
|
79ddf0e47a | ||
|
|
177f3a8622 | ||
|
|
5a5f3d6b30 | ||
|
|
4e5d759c37 | ||
|
|
1a702dfdc7 | ||
|
|
3ef84816a6 | ||
|
|
6e9b592c56 | ||
|
|
a6b5543bd8 | ||
|
|
a4759eb147 | ||
|
|
732ed2b7df | ||
|
|
63fab9a82b | ||
|
|
ff17b427c8 | ||
|
|
ca44df7c88 | ||
|
|
9daa7ea555 | ||
|
|
2f0020f00f | ||
|
|
599d27d93e | ||
|
|
696f209ec1 | ||
|
|
1a6ead667b | ||
|
|
bbf2f5d716 | ||
|
|
9574044083 | ||
|
|
822b725bec | ||
|
|
dc775f75f0 | ||
|
|
738947bdec | ||
|
|
b7efeafc03 | ||
|
|
f5d1a17a5c | ||
|
|
03024e6b4e | ||
|
|
1d61676c7b | ||
|
|
23fb63f45c | ||
|
|
0be71edf3f | ||
|
|
6b50deb7b7 | ||
|
|
6ad3e6a5e3 | ||
|
|
b1dce1e241 | ||
|
|
cc42052039 | ||
|
|
ecf5aea071 | ||
|
|
79d21a0d02 | ||
|
|
43949151b1 | ||
|
|
16aad326e4 | ||
|
|
1a6f2d38da | ||
|
|
c6149d36b3 | ||
|
|
34e493f945 | ||
|
|
866b301626 | ||
|
|
cabc0fa0e6 | ||
|
|
d703354fcd | ||
|
|
37036f2eb0 | ||
|
|
ff0dc62314 | ||
|
|
f05f13780e | ||
|
|
4d74855fd7 | ||
|
|
5088a360b5 | ||
|
|
891b1907ae | ||
|
|
ae988642fb | ||
|
|
75e442c170 | ||
|
|
8808af1c99 | ||
|
|
b9d2a03ffc | ||
|
|
157b56cca5 | ||
|
|
caaeae123a | ||
|
|
20235a0d22 | ||
|
|
ae19489250 | ||
|
|
242c48f302 | ||
|
|
110849355c | ||
|
|
36fd3115f1 | ||
|
|
7d9b876968 | ||
|
|
40f0da1254 | ||
|
|
aea3964abd | ||
|
|
780bff781d | ||
|
|
c6a2ab5165 | ||
|
|
ef1c660708 | ||
|
|
11f8d3cb24 | ||
|
|
3ac9c3cc1c | ||
|
|
aa0f54cb93 | ||
|
|
8a3f882ef7 | ||
|
|
b7dd57ac32 | ||
|
|
cf1c7772f3 | ||
|
|
329d5e2af5 | ||
|
|
0098678a1d | ||
|
|
bf4c2caa11 | ||
|
|
226f42e04a | ||
|
|
96d19fcfe2 | ||
|
|
58483426cd | ||
|
|
25f7ef7338 | ||
|
|
412806bb22 | ||
|
|
4c87406391 | ||
|
|
5f7b96b58f | ||
|
|
f1151a84ad | ||
|
|
cdc68a2237 | ||
|
|
e866793eb3 | ||
|
|
cf9c418bcb | ||
|
|
138ef1328e | ||
|
|
e5e6d7ca43 | ||
|
|
cb81fc5445 | ||
|
|
d8caf7f9fa | ||
|
|
6f8ceb0ea9 | ||
|
|
02b589b2ce | ||
|
|
55d59ebf1f | ||
|
|
e1bc6c55d5 | ||
|
|
e42dede529 | ||
|
|
73ef93ffa3 | ||
|
|
475f71a2a1 | ||
|
|
af6035ce36 | ||
|
|
bfa395d1d5 | ||
|
|
76bb5b8619 | ||
|
|
6354e608a7 | ||
|
|
28d9527189 | ||
|
|
fbcd843c58 | ||
|
|
6b0c2383d5 | ||
|
|
f1a748fcab | ||
|
|
87296405a7 | ||
|
|
4dfbabd590 | ||
|
|
ea1135a464 | ||
|
|
f1755df6f0 | ||
|
|
b01f67857f | ||
|
|
9fe7ea340d | ||
|
|
68ba6b9e79 | ||
|
|
a703d2d019 | ||
|
|
5137213f86 | ||
|
|
c98da7daf7 | ||
|
|
25252c9b46 | ||
|
|
21ff566d69 | ||
|
|
a36a01e235 | ||
|
|
9ae870546b | ||
|
|
a4b0817cd3 | ||
|
|
c2a5451e93 | ||
|
|
150ae032e8 | ||
|
|
37ee951448 | ||
|
|
b8c70ba6cf | ||
|
|
cbcf9506d9 | ||
|
|
92bd629e60 | ||
|
|
e7031b07ae | ||
|
|
41a5ebe09f | ||
|
|
cd97c21038 | ||
|
|
57d22908d1 | ||
|
|
749c51d71a | ||
|
|
80bbad6568 | ||
|
|
da1b3d2007 | ||
|
|
050a4b5c71 | ||
|
|
6f52b649da | ||
|
|
bbc621adff | ||
|
|
71c223e111 | ||
|
|
ee25618197 | ||
|
|
4f3ef07455 | ||
|
|
fad58168d2 | ||
|
|
4fefb8507c | ||
|
|
39d5c8a8a5 | ||
|
|
6a43f7f52d | ||
|
|
c486a049a8 | ||
|
|
5a0b935231 | ||
|
|
688ddbda74 | ||
|
|
b9fba61153 | ||
|
|
dfca8147df | ||
|
|
b85c30cd89 | ||
|
|
f83e42de20 | ||
|
|
927dde7b34 | ||
|
|
c85576b15d | ||
|
|
db60af1a44 | ||
|
|
292035efcb | ||
|
|
823d790b1c | ||
|
|
5573b2e899 | ||
|
|
2f0789af7c | ||
|
|
f8e640c018 | ||
|
|
b0018465cc | ||
|
|
dd057613b9 | ||
|
|
bf14a09a23 | ||
|
|
81711faebe | ||
|
|
86fd13643b | ||
|
|
861be5560e | ||
|
|
7f3e6f23f6 | ||
|
|
5f34387bea | ||
|
|
e22383dff9 | ||
|
|
c1a5b4acc5 | ||
|
|
61b343cf7d | ||
|
|
ec3487867c | ||
|
|
3c52344b53 | ||
|
|
f37d89afb1 | ||
|
|
a0b5006b78 | ||
|
|
1a10f2b46e | ||
|
|
acc0fe6db4 | ||
|
|
6e89419250 | ||
|
|
d5aa7265df | ||
|
|
fe27a181d3 | ||
|
|
145b9e7d09 | ||
|
|
fab33be408 | ||
|
|
da27f22622 | ||
|
|
20d33e480b | ||
|
|
066e2f2589 | ||
|
|
9697a2b058 | ||
|
|
b37f94d396 | ||
|
|
eff2ea6271 | ||
|
|
d105b048b1 | ||
|
|
d356e27a4d | ||
|
|
96e84b276b | ||
|
|
60ef13e079 | ||
|
|
10ce5ddd24 | ||
|
|
cab78045b7 | ||
|
|
d5e3ea0ab7 |
31
.buildkite/bootstrap.yml
Normal file
31
.buildkite/bootstrap.yml
Normal file
@@ -0,0 +1,31 @@
|
||||
# Uploads the latest CI workflow to Buildkite.
|
||||
# https://buildkite.com/docs/pipelines/defining-steps
|
||||
#
|
||||
# Changes to this file must be manually edited here:
|
||||
# https://buildkite.com/bun/bun/settings/steps
|
||||
steps:
|
||||
- if: "build.pull_request.repository.fork"
|
||||
block: ":eyes:"
|
||||
prompt: "Did you review the PR?"
|
||||
blocked_state: "running"
|
||||
|
||||
- label: ":pipeline:"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
command:
|
||||
- ".buildkite/scripts/prepare-build.sh"
|
||||
|
||||
- if: "build.branch == 'main' && !build.pull_request.repository.fork"
|
||||
label: ":github:"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-bun"
|
||||
- "darwin-x64-build-bun"
|
||||
- "linux-aarch64-build-bun"
|
||||
- "linux-x64-build-bun"
|
||||
- "linux-x64-baseline-build-bun"
|
||||
- "windows-x64-build-bun"
|
||||
- "windows-x64-baseline-build-bun"
|
||||
command:
|
||||
- ".buildkite/scripts/upload-release.sh"
|
||||
63
.buildkite/ci.md
Normal file
63
.buildkite/ci.md
Normal file
@@ -0,0 +1,63 @@
|
||||
## CI
|
||||
|
||||
How does CI work?
|
||||
|
||||
### Building
|
||||
|
||||
Bun is built on macOS, Linux, and Windows. The process is split into the following steps, the first 3 of which are able to run in parallel:
|
||||
|
||||
#### 1. `build-deps`
|
||||
|
||||
Builds the static libraries in `src/deps` and outputs a directory: `build/bun-deps`.
|
||||
|
||||
- on Windows, this runs the script: [`scripts/all-dependencies.ps1`](scripts/all-dependencies.ps1)
|
||||
- on macOS and Linux, this runs the script: [`scripts/all-dependencies.sh`](scripts/all-dependencies.sh)
|
||||
|
||||
#### 2. `build-zig`
|
||||
|
||||
Builds the Zig object file: `build/bun-zig.o`. Since `zig build` supports cross-compiling, this step is run on macOS aarch64 since we have observed it to be the fastest.
|
||||
|
||||
- on macOS and Linux, this runs the script: [`scripts/build-bun-zig.sh`](scripts/build-bun-zig.sh)
|
||||
|
||||
#### 3. `build-cpp`
|
||||
|
||||
Builds the C++ object file: `build/bun-cpp-objects.a`.
|
||||
|
||||
- on Windows, this runs the script: [`scripts/build-bun-cpp.ps1`](scripts/build-bun-cpp.ps1)
|
||||
- on macOS and Linux, this runs the script: [`scripts/build-bun-cpp.sh`](scripts/build-bun-cpp.sh)
|
||||
|
||||
#### 4. `link` / `build-bun`
|
||||
|
||||
After the `build-deps`, `build-zig`, and `build-cpp` steps have completed, this step links the Zig object file and C++ object file into a single binary: `bun-<os>-<arch>.zip`.
|
||||
|
||||
- on Windows, this runs the script: [`scripts/buildkite-link-bun.ps1`](scripts/buildkite-link-bun.ps1)
|
||||
- on macOS and Linux, this runs the script: [`scripts/buildkite-link-bun.sh`](scripts/buildkite-link-bun.sh)
|
||||
|
||||
To speed up the build, there are two options:
|
||||
|
||||
- `--fast`: This disables the LTO (link-time optimization) step.
|
||||
- without `--fast`: This runs the LTO step, which is the default. The binaries that are released to GitHub are always built with LTO.
|
||||
|
||||
### Testing
|
||||
|
||||
### FAQ
|
||||
|
||||
> How do I add a new CI agent?
|
||||
|
||||
> How do I add/modify system dependencies?
|
||||
|
||||
> How do I SSH into a CI agent?
|
||||
|
||||
### Known issues
|
||||
|
||||
These are things that we know about, but haven't fixed or optimized yet.
|
||||
|
||||
- There is no `scripts/build-bun-zig.ps1` for Windows.
|
||||
|
||||
- The `build-deps` step does not cache in CI, so it re-builds each time (though it does use ccache). It attempts to check the `BUN_DEPS_CACHE_DIR` environment variable, but for some reason it doesn't work.
|
||||
|
||||
- Windows and Linux machines sometimes take up to 1-2 minutes to start tests. This is because robobun is listening for when the job is scheduled to provision the VM. Instead, it can start provisioning during the link step, or keep a pool of idle VMs around (but it's unclear how more expensive this is).
|
||||
|
||||
- There are a limited number of macOS VMs. This is because they are expensive and manually provisioned, mostly through MacStadium. If wait times are too long we can just provision more, or buy some.
|
||||
|
||||
- To prevent idle machines, robobun periodically checks for idle machines and terminates them. Before doing this, it checks to see if the machine is connected as an agent to Buildkite. However, sometimes the machine picks up a job in-between this time, and the job is terminated.
|
||||
790
.buildkite/ci.yml
Normal file
790
.buildkite/ci.yml
Normal file
@@ -0,0 +1,790 @@
|
||||
# Build and test Bun on macOS, Linux, and Windows.
|
||||
# https://buildkite.com/docs/pipelines/defining-steps
|
||||
#
|
||||
# If a step has the `robobun: true` label, robobun will listen
|
||||
# to webhooks from Buildkite and provision a VM to run the step.
|
||||
#
|
||||
# Changes to this file will be automatically uploaded on the next run
|
||||
# for a particular commit.
|
||||
#
|
||||
# Future tests machines to be added:
|
||||
# - macOS 12
|
||||
# - Windows Server 2016 & 2019
|
||||
# - Amazon Linux 2 & 2023
|
||||
# - CentOS / RHEL / Fedora / other Linux distros
|
||||
# - Docker containers
|
||||
# - Raspberry Pi?
|
||||
steps:
|
||||
# macOS aarch64
|
||||
- key: "darwin-aarch64"
|
||||
group: ":darwin: aarch64"
|
||||
steps:
|
||||
- key: "darwin-aarch64-build-deps"
|
||||
label: ":darwin: aarch64 - build-deps"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-deps.sh"
|
||||
|
||||
- key: "darwin-aarch64-build-zig"
|
||||
label: ":darwin: aarch64 - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh darwin aarch64"
|
||||
|
||||
- key: "darwin-aarch64-build-cpp"
|
||||
label: ":darwin: aarch64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-cpp.sh"
|
||||
|
||||
- key: "darwin-aarch64-build-bun"
|
||||
label: ":darwin: aarch64 - build-bun"
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-deps"
|
||||
- "darwin-aarch64-build-zig"
|
||||
- "darwin-aarch64-build-cpp"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-bun.sh"
|
||||
|
||||
- key: "darwin-aarch64-test-macos-14"
|
||||
label: ":darwin: 14 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 3
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
release: "14"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
|
||||
|
||||
- key: "darwin-aarch64-test-macos-13"
|
||||
label: ":darwin: 13 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 3
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
release: "13"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
|
||||
|
||||
# macOS x64
|
||||
- key: "darwin-x64"
|
||||
group: ":darwin: x64"
|
||||
steps:
|
||||
- key: "darwin-x64-build-deps"
|
||||
label: ":darwin: x64 - build-deps"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-deps.sh"
|
||||
|
||||
- key: "darwin-x64-build-zig"
|
||||
label: ":darwin: x64 - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh darwin x64"
|
||||
|
||||
- key: "darwin-x64-build-cpp"
|
||||
label: ":darwin: x64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-cpp.sh"
|
||||
|
||||
- key: "darwin-x64-build-bun"
|
||||
label: ":darwin: x64 - build-bun"
|
||||
depends_on:
|
||||
- "darwin-x64-build-deps"
|
||||
- "darwin-x64-build-zig"
|
||||
- "darwin-x64-build-cpp"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-bun.sh"
|
||||
|
||||
- key: "darwin-x64-test-macos-14"
|
||||
label: ":darwin: 14 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 2
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-x64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
release: "14"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
|
||||
|
||||
- key: "darwin-x64-test-macos-13"
|
||||
label: ":darwin: 13 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 2
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-x64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
release: "13"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
|
||||
|
||||
# Linux aarch64
|
||||
- key: "linux-aarch64"
|
||||
group: ":linux: aarch64"
|
||||
steps:
|
||||
- key: "linux-aarch64-build-deps"
|
||||
label: ":linux: aarch64 - build-deps"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-deps.sh"
|
||||
|
||||
- key: "linux-aarch64-build-zig"
|
||||
label: ":linux: aarch64 - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh linux aarch64"
|
||||
|
||||
- key: "linux-aarch64-build-cpp"
|
||||
label: ":linux: aarch64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-cpp.sh"
|
||||
|
||||
- key: "linux-aarch64-build-bun"
|
||||
label: ":linux: aarch64 - build-bun"
|
||||
depends_on:
|
||||
- "linux-aarch64-build-deps"
|
||||
- "linux-aarch64-build-zig"
|
||||
- "linux-aarch64-build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-bun.sh"
|
||||
|
||||
- key: "linux-aarch64-test-debian-12"
|
||||
label: ":debian: 12 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-aarch64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
distro: "debian"
|
||||
release: "12"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
|
||||
|
||||
- key: "linux-aarch64-test-ubuntu-2204"
|
||||
label: ":ubuntu: 22.04 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-aarch64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
distro: "ubuntu"
|
||||
release: "22.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
|
||||
|
||||
- key: "linux-aarch64-test-ubuntu-2004"
|
||||
label: ":ubuntu: 20.04 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-aarch64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
distro: "ubuntu"
|
||||
release: "20.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
|
||||
|
||||
# Linux x64
|
||||
- key: "linux-x64"
|
||||
group: ":linux: x64"
|
||||
steps:
|
||||
- key: "linux-x64-build-deps"
|
||||
label: ":linux: x64 - build-deps"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-deps.sh"
|
||||
|
||||
- key: "linux-x64-build-zig"
|
||||
label: ":linux: x64 - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh linux x64"
|
||||
|
||||
- key: "linux-x64-build-cpp"
|
||||
label: ":linux: x64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-cpp.sh"
|
||||
|
||||
- key: "linux-x64-build-bun"
|
||||
label: ":linux: x64 - build-bun"
|
||||
depends_on:
|
||||
- "linux-x64-build-deps"
|
||||
- "linux-x64-build-zig"
|
||||
- "linux-x64-build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-bun.sh"
|
||||
|
||||
- key: "linux-x64-test-debian-12"
|
||||
label: ":debian: 12 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "debian"
|
||||
release: "12"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
|
||||
|
||||
- key: "linux-x64-test-ubuntu-2204"
|
||||
label: ":ubuntu: 22.04 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "22.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
|
||||
|
||||
- key: "linux-x64-test-ubuntu-2004"
|
||||
label: ":ubuntu: 20.04 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "20.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
|
||||
|
||||
# Linux x64-baseline
|
||||
- key: "linux-x64-baseline"
|
||||
group: ":linux: x64-baseline"
|
||||
steps:
|
||||
- key: "linux-x64-baseline-build-deps"
|
||||
label: ":linux: x64-baseline - build-deps"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-deps.sh"
|
||||
|
||||
- key: "linux-x64-baseline-build-zig"
|
||||
label: ":linux: x64-baseline - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh linux x64"
|
||||
|
||||
- key: "linux-x64-baseline-build-cpp"
|
||||
label: ":linux: x64-baseline - build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-cpp.sh"
|
||||
|
||||
- key: "linux-x64-baseline-build-bun"
|
||||
label: ":linux: x64-baseline - build-bun"
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-deps"
|
||||
- "linux-x64-baseline-build-zig"
|
||||
- "linux-x64-baseline-build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-bun.sh"
|
||||
|
||||
- key: "linux-x64-baseline-test-debian-12"
|
||||
label: ":debian: 12 x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "debian"
|
||||
release: "12"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
|
||||
|
||||
- key: "linux-x64-baseline-test-ubuntu-2204"
|
||||
label: ":ubuntu: 22.04 x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "22.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
|
||||
|
||||
- key: "linux-x64-baseline-test-ubuntu-2004"
|
||||
label: ":ubuntu: 20.04 x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 5
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "20.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
|
||||
|
||||
# Windows x64
|
||||
- key: "windows-x64"
|
||||
group: ":windows: x64"
|
||||
steps:
|
||||
- key: "windows-x64-build-deps"
|
||||
label: ":windows: x64 - build-deps"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
artifact_paths:
|
||||
- "build\\bun-deps\\*.lib"
|
||||
env:
|
||||
SCCACHE_DIR: "$$HOME\\.cache\\sccache"
|
||||
ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache"
|
||||
SCCACHE_IGNORE_SERVER_IO_ERROR: "1"
|
||||
command:
|
||||
- ".\\scripts\\all-dependencies.ps1"
|
||||
|
||||
- key: "windows-x64-build-zig"
|
||||
label: ":windows: x64 - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin" # cross-compile on Linux or Darwin
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh windows x64"
|
||||
|
||||
- key: "windows-x64-build-cpp"
|
||||
label: ":windows: x64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
artifact_paths:
|
||||
# HACK: See scripts/build-bun-cpp.ps1
|
||||
# - "build\\bun-cpp-objects.a"
|
||||
- "build\\bun-cpp-objects.a.*"
|
||||
command:
|
||||
- ".\\scripts\\build-bun-cpp.ps1"
|
||||
|
||||
- key: "windows-x64-build-bun"
|
||||
label: ":windows: x64 - build-bun"
|
||||
depends_on:
|
||||
- "windows-x64-build-deps"
|
||||
- "windows-x64-build-zig"
|
||||
- "windows-x64-build-cpp"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
artifact_paths:
|
||||
- "bun-windows-x64.zip"
|
||||
- "bun-windows-x64-profile.zip"
|
||||
- "features.json"
|
||||
env:
|
||||
SCCACHE_DIR: "$$HOME\\.cache\\sccache"
|
||||
ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache"
|
||||
SCCACHE_IGNORE_SERVER_IO_ERROR: "1"
|
||||
command:
|
||||
- ".\\scripts\\buildkite-link-bun.ps1"
|
||||
|
||||
- key: "windows-x64-test-bun"
|
||||
label: ":windows: x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 1
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "windows-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
|
||||
|
||||
# Windows x64-baseline
|
||||
- key: "windows-x64-baseline"
|
||||
group: ":windows: x64-baseline"
|
||||
steps:
|
||||
- key: "windows-x64-baseline-build-deps"
|
||||
label: ":windows: x64-baseline - build-deps"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
artifact_paths:
|
||||
- "build\\bun-deps\\*.lib"
|
||||
env:
|
||||
SCCACHE_DIR: "$$HOME\\.cache\\sccache"
|
||||
ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache"
|
||||
SCCACHE_IGNORE_SERVER_IO_ERROR: "1"
|
||||
USE_BASELINE_BUILD: "1"
|
||||
command:
|
||||
- ".\\scripts\\all-dependencies.ps1"
|
||||
|
||||
- key: "windows-x64-baseline-build-zig"
|
||||
label: ":windows: x64-baseline - build-zig"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin" # cross-compile on Linux or Darwin
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "./.buildkite/scripts/build-zig.sh windows x64"
|
||||
|
||||
- key: "windows-x64-baseline-build-cpp"
|
||||
label: ":windows: x64-baseline - build-cpp"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
artifact_paths:
|
||||
# HACK: See scripts/build-bun-cpp.ps1
|
||||
# - "build\\bun-cpp-objects.a"
|
||||
- "build\\bun-cpp-objects.a.*"
|
||||
env:
|
||||
SCCACHE_DIR: "$$HOME\\.cache\\sccache"
|
||||
ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache"
|
||||
SCCACHE_IGNORE_SERVER_IO_ERROR: "1"
|
||||
USE_BASELINE_BUILD: "1"
|
||||
command:
|
||||
- ".\\scripts\\build-bun-cpp.ps1"
|
||||
|
||||
- key: "windows-x64-baseline-build-bun"
|
||||
label: ":windows: x64-baseline - build-bun"
|
||||
depends_on:
|
||||
- "windows-x64-baseline-build-deps"
|
||||
- "windows-x64-baseline-build-zig"
|
||||
- "windows-x64-baseline-build-cpp"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
artifact_paths:
|
||||
- "bun-windows-x64-baseline.zip"
|
||||
- "bun-windows-x64-baseline-profile.zip"
|
||||
- "features.json"
|
||||
env:
|
||||
SCCACHE_DIR: "$$HOME\\.cache\\sccache"
|
||||
ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache"
|
||||
SCCACHE_IGNORE_SERVER_IO_ERROR: "1"
|
||||
USE_BASELINE_BUILD: "1"
|
||||
command:
|
||||
- ".\\scripts\\buildkite-link-bun.ps1 -Baseline $$True"
|
||||
|
||||
- key: "windows-x64-baseline-test-bun"
|
||||
label: ":windows: x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 1
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "windows-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"
|
||||
55
.buildkite/scripts/build-bun.sh
Executable file
55
.buildkite/scripts/build-bun.sh
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
source "$(dirname "$0")/env.sh"
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
cwd="$(pwd)"
|
||||
|
||||
mkdir -p build
|
||||
source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps"
|
||||
source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig"
|
||||
source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split
|
||||
cd build
|
||||
|
||||
run_command cmake .. "${CMAKE_FLAGS[@]}" \
|
||||
-GNinja \
|
||||
-DBUN_LINK_ONLY="1" \
|
||||
-DNO_CONFIGURE_DEPENDS="1" \
|
||||
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
|
||||
-DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \
|
||||
-DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \
|
||||
-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
|
||||
-DCPU_TARGET="$CPU_TARGET" \
|
||||
-DUSE_LTO="$USE_LTO" \
|
||||
-DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
|
||||
-DCANARY="$CANARY" \
|
||||
-DGIT_SHA="$GIT_SHA"
|
||||
run_command ninja -v -j "$CPUS"
|
||||
run_command ls
|
||||
|
||||
tag="bun-$BUILDKITE_GROUP_KEY"
|
||||
if [ "$USE_LTO" == "OFF" ]; then
|
||||
# Remove OS check when LTO is enabled on macOS again
|
||||
if [[ "$tag" == *"darwin"* ]]; then
|
||||
tag="$tag-nolto"
|
||||
fi
|
||||
fi
|
||||
|
||||
for name in bun bun-profile; do
|
||||
dir="$tag"
|
||||
if [ "$name" == "bun-profile" ]; then
|
||||
dir="$tag-profile"
|
||||
fi
|
||||
run_command chmod +x "$name"
|
||||
run_command "./$name" --revision
|
||||
run_command mkdir -p "$dir"
|
||||
run_command mv "$name" "$dir/$name"
|
||||
run_command zip -r "$dir.zip" "$dir"
|
||||
source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip"
|
||||
done
|
||||
35
.buildkite/scripts/build-cpp.sh
Executable file
35
.buildkite/scripts/build-cpp.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
source "$(dirname "$0")/env.sh"
|
||||
export FORCE_UPDATE_SUBMODULES=1
|
||||
source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)"
|
||||
{ set +x; } 2>/dev/null
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
mkdir -p build
|
||||
cd build
|
||||
mkdir -p tmp_modules tmp_functions js codegen
|
||||
|
||||
run_command cmake .. "${CMAKE_FLAGS[@]}" \
|
||||
-GNinja \
|
||||
-DBUN_CPP_ONLY="1" \
|
||||
-DNO_CONFIGURE_DEPENDS="1" \
|
||||
-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
|
||||
-DCPU_TARGET="$CPU_TARGET" \
|
||||
-DUSE_LTO="$USE_LTO" \
|
||||
-DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
|
||||
-DCANARY="$CANARY" \
|
||||
-DGIT_SHA="$GIT_SHA"
|
||||
|
||||
chmod +x compile-cpp-only.sh
|
||||
source compile-cpp-only.sh -v -j "$CPUS"
|
||||
{ set +x; } 2>/dev/null
|
||||
|
||||
cd ..
|
||||
source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split
|
||||
0
.buildkite/scripts/build-deps.ps1
Normal file
0
.buildkite/scripts/build-deps.ps1
Normal file
22
.buildkite/scripts/build-deps.sh
Executable file
22
.buildkite/scripts/build-deps.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
source "$(dirname "$0")/env.sh"
|
||||
source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)"
|
||||
|
||||
artifacts=(
|
||||
libcrypto.a libssl.a libdecrepit.a
|
||||
libcares.a
|
||||
libarchive.a
|
||||
liblolhtml.a
|
||||
libmimalloc.a libmimalloc.o
|
||||
libtcc.a
|
||||
libz.a
|
||||
libzstd.a
|
||||
libdeflate.a
|
||||
liblshpack.a
|
||||
)
|
||||
|
||||
for artifact in "${artifacts[@]}"; do
|
||||
source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact"
|
||||
done
|
||||
40
.buildkite/scripts/build-old-js.sh
Executable file
40
.buildkite/scripts/build-old-js.sh
Executable file
@@ -0,0 +1,40 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
source "$(dirname "$0")/env.sh"
|
||||
|
||||
function assert_bun() {
|
||||
if ! command -v bun &>/dev/null; then
|
||||
echo "error: bun is not installed" 1>&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_make() {
|
||||
if ! command -v make &>/dev/null; then
|
||||
echo "error: make is not installed" 1>&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
function build_node_fallbacks() {
|
||||
local cwd="src/node-fallbacks"
|
||||
run_command bun install --cwd "$cwd" --frozen-lockfile
|
||||
run_command bun run --cwd "$cwd" build
|
||||
}
|
||||
|
||||
function build_old_js() {
|
||||
run_command bun install --frozen-lockfile
|
||||
run_command make runtime_js fallback_decoder bun_error
|
||||
}
|
||||
|
||||
assert_bun
|
||||
assert_make
|
||||
build_node_fallbacks
|
||||
build_old_js
|
||||
80
.buildkite/scripts/build-zig.sh
Executable file
80
.buildkite/scripts/build-zig.sh
Executable file
@@ -0,0 +1,80 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
source "$(dirname "$0")/env.sh"
|
||||
|
||||
function assert_target() {
|
||||
local arch="${2-$(uname -m)}"
|
||||
case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in
|
||||
x64 | x86_64 | amd64)
|
||||
export ZIG_ARCH="x86_64"
|
||||
if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
|
||||
export ZIG_CPU_TARGET="nehalem"
|
||||
else
|
||||
export ZIG_CPU_TARGET="haswell"
|
||||
fi
|
||||
;;
|
||||
aarch64 | arm64)
|
||||
export ZIG_ARCH="aarch64"
|
||||
export ZIG_CPU_TARGET="native"
|
||||
;;
|
||||
*)
|
||||
echo "error: Unsupported architecture: $arch" 1>&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
local os="${1-$(uname -s)}"
|
||||
case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in
|
||||
linux)
|
||||
export ZIG_TARGET="$ZIG_ARCH-linux-gnu" ;;
|
||||
darwin)
|
||||
export ZIG_TARGET="$ZIG_ARCH-macos-none" ;;
|
||||
windows)
|
||||
export ZIG_TARGET="$ZIG_ARCH-windows-msvc" ;;
|
||||
*)
|
||||
echo "error: Unsupported operating system: $os" 1>&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
assert_target "$@"
|
||||
|
||||
# Since the zig build depends on files from the zig submodule,
|
||||
# make sure to update the submodule before building.
|
||||
run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig
|
||||
|
||||
# TODO: Move these to be part of the CMake build
|
||||
source "$(dirname "$0")/build-old-js.sh"
|
||||
|
||||
cwd="$(pwd)"
|
||||
mkdir -p build
|
||||
cd build
|
||||
|
||||
run_command cmake .. "${CMAKE_FLAGS[@]}" \
|
||||
-GNinja \
|
||||
-DNO_CONFIGURE_DEPENDS="1" \
|
||||
-DNO_CODEGEN="0" \
|
||||
-DWEBKIT_DIR="omit" \
|
||||
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
|
||||
-DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \
|
||||
-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
|
||||
-DARCH="$ZIG_ARCH" \
|
||||
-DCPU_TARGET="$ZIG_CPU_TARGET" \
|
||||
-DZIG_TARGET="$ZIG_TARGET" \
|
||||
-DUSE_LTO="$USE_LTO" \
|
||||
-DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
|
||||
-DCANARY="$CANARY" \
|
||||
-DGIT_SHA="$GIT_SHA"
|
||||
|
||||
export ONLY_ZIG="1"
|
||||
run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS"
|
||||
|
||||
cd ..
|
||||
source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o"
|
||||
47
.buildkite/scripts/download-artifact.ps1
Executable file
47
.buildkite/scripts/download-artifact.ps1
Executable file
@@ -0,0 +1,47 @@
|
||||
param (
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string[]] $Paths,
|
||||
[switch] $Split
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
function Assert-Buildkite-Agent() {
|
||||
if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
|
||||
Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
function Assert-Join-File() {
|
||||
if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) {
|
||||
Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
function Download-Buildkite-Artifact() {
|
||||
param (
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string] $Path,
|
||||
)
|
||||
if ($Split) {
|
||||
& buildkite-agent artifact download "$Path.*" --debug --debug-http
|
||||
Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles
|
||||
} else {
|
||||
& buildkite-agent artifact download "$Path" --debug --debug-http
|
||||
}
|
||||
if (-not (Test-Path $Path)) {
|
||||
Write-Error "Could not find artifact: $Path"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
Assert-Buildkite-Agent
|
||||
if ($Split) {
|
||||
Assert-Join-File
|
||||
}
|
||||
|
||||
foreach ($Path in $Paths) {
|
||||
Download-Buildkite-Artifact $Path
|
||||
}
|
||||
46
.buildkite/scripts/download-artifact.sh
Executable file
46
.buildkite/scripts/download-artifact.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v buildkite-agent &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function download_buildkite_artifact() {
|
||||
local path="$1"; shift
|
||||
local split="0"
|
||||
local args=()
|
||||
while true; do
|
||||
if [ -z "$1" ]; then
|
||||
break
|
||||
fi
|
||||
case "$1" in
|
||||
--split) split="1"; shift ;;
|
||||
*) args+=("$1"); shift ;;
|
||||
esac
|
||||
done
|
||||
if [ "$split" == "1" ]; then
|
||||
run_command buildkite-agent artifact download "$path.*" . "${args[@]}"
|
||||
run_command cat $path.?? > "$path"
|
||||
run_command rm -f $path.??
|
||||
else
|
||||
run_command buildkite-agent artifact download "$path" . "${args[@]}"
|
||||
fi
|
||||
if [[ "$path" != *"*"* ]] && [ ! -f "$path" ]; then
|
||||
echo "error: Could not find artifact: $path"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
assert_buildkite_agent
|
||||
download_buildkite_artifact "$@"
|
||||
119
.buildkite/scripts/env.sh
Executable file
119
.buildkite/scripts/env.sh
Executable file
@@ -0,0 +1,119 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_os() {
|
||||
local os="$(uname -s)"
|
||||
case "$os" in
|
||||
Linux)
|
||||
echo "linux" ;;
|
||||
Darwin)
|
||||
echo "darwin" ;;
|
||||
*)
|
||||
echo "error: Unsupported operating system: $os" 1>&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
function assert_arch() {
|
||||
local arch="$(uname -m)"
|
||||
case "$arch" in
|
||||
aarch64 | arm64)
|
||||
echo "aarch64" ;;
|
||||
x86_64 | amd64)
|
||||
echo "x64" ;;
|
||||
*)
|
||||
echo "error: Unknown architecture: $arch" 1>&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
function assert_build() {
|
||||
if [ -z "$BUILDKITE_REPO" ]; then
|
||||
echo "error: Cannot find repository for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_COMMIT" ]; then
|
||||
echo "error: Cannot find commit for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_STEP_KEY" ]; then
|
||||
echo "error: Cannot find step key for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then
|
||||
echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'"
|
||||
exit 1
|
||||
fi
|
||||
# Skip os and arch checks for Zig, since it's cross-compiled on macOS
|
||||
if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then
|
||||
local os="$(assert_os)"
|
||||
if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then
|
||||
echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'"
|
||||
exit 1
|
||||
fi
|
||||
local arch="$(assert_arch)"
|
||||
if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then
|
||||
echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v buildkite-agent &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function export_environment() {
|
||||
source "$(realpath $(dirname "$0")/../../scripts/env.sh)"
|
||||
source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)"
|
||||
{ set +x; } 2>/dev/null
|
||||
export GIT_SHA="$BUILDKITE_COMMIT"
|
||||
export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY"
|
||||
export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY"
|
||||
export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
|
||||
export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY"
|
||||
if [ "$(assert_arch)" == "aarch64" ]; then
|
||||
export CPU_TARGET="native"
|
||||
elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
|
||||
export CPU_TARGET="nehalem"
|
||||
else
|
||||
export CPU_TARGET="haswell"
|
||||
fi
|
||||
if [[ "$BUILDKITE_STEP_KEY" == *"nolto"* ]]; then
|
||||
export USE_LTO="OFF"
|
||||
else
|
||||
export USE_LTO="ON"
|
||||
fi
|
||||
if $(buildkite-agent meta-data exists release &> /dev/null); then
|
||||
export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)"
|
||||
else
|
||||
export CMAKE_BUILD_TYPE="Release"
|
||||
fi
|
||||
if $(buildkite-agent meta-data exists canary &> /dev/null); then
|
||||
export CANARY="$(buildkite-agent meta-data get canary)"
|
||||
else
|
||||
export CANARY="1"
|
||||
fi
|
||||
if $(buildkite-agent meta-data exists assertions &> /dev/null); then
|
||||
export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)"
|
||||
else
|
||||
export USE_DEBUG_JSC="OFF"
|
||||
fi
|
||||
if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ]; then
|
||||
rm -rf "$CCACHE_DIR"
|
||||
rm -rf "$SCCACHE_DIR"
|
||||
rm -rf "$ZIG_LOCAL_CACHE_DIR"
|
||||
rm -rf "$BUN_DEPS_CACHE_DIR"
|
||||
fi
|
||||
}
|
||||
|
||||
assert_build
|
||||
assert_buildkite_agent
|
||||
export_environment
|
||||
97
.buildkite/scripts/prepare-build.sh
Executable file
97
.buildkite/scripts/prepare-build.sh
Executable file
@@ -0,0 +1,97 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_build() {
|
||||
if [ -z "$BUILDKITE_REPO" ]; then
|
||||
echo "error: Cannot find repository for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_COMMIT" ]; then
|
||||
echo "error: Cannot find commit for this build"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v buildkite-agent &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_jq() {
|
||||
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
|
||||
}
|
||||
|
||||
function assert_curl() {
|
||||
assert_command "curl" "curl" "https://curl.se/download.html"
|
||||
}
|
||||
|
||||
function assert_command() {
|
||||
local command="$1"
|
||||
local package="$2"
|
||||
local help_url="$3"
|
||||
if ! command -v "$command" &> /dev/null; then
|
||||
echo "warning: $command is not installed, installing..."
|
||||
if command -v brew &> /dev/null; then
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
|
||||
else
|
||||
echo "error: Cannot install $command, please install it"
|
||||
if [ -n "$help_url" ]; then
|
||||
echo ""
|
||||
echo "hint: See $help_url for help"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_release() {
|
||||
if [ "$RELEASE" == "1" ]; then
|
||||
run_command buildkite-agent meta-data set canary "0"
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_canary() {
|
||||
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
|
||||
if [ -z "$canary" ]; then
|
||||
local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
|
||||
local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
|
||||
if [ "$tag" == "null" ]; then
|
||||
canary="1"
|
||||
else
|
||||
local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
|
||||
if [ "$revision" == "null" ]; then
|
||||
canary="1"
|
||||
else
|
||||
canary="$revision"
|
||||
fi
|
||||
fi
|
||||
run_command buildkite-agent meta-data set canary "$canary"
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_buildkite_pipeline() {
|
||||
local path="$1"
|
||||
if [ ! -f "$path" ]; then
|
||||
echo "error: Cannot find pipeline: $path"
|
||||
exit 1
|
||||
fi
|
||||
run_command buildkite-agent pipeline upload "$path"
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
assert_build
|
||||
assert_buildkite_agent
|
||||
assert_jq
|
||||
assert_curl
|
||||
assert_release
|
||||
assert_canary
|
||||
upload_buildkite_pipeline ".buildkite/ci.yml"
|
||||
47
.buildkite/scripts/upload-artifact.ps1
Executable file
47
.buildkite/scripts/upload-artifact.ps1
Executable file
@@ -0,0 +1,47 @@
|
||||
param (
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string[]] $Paths,
|
||||
[switch] $Split
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
function Assert-Buildkite-Agent() {
|
||||
if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
|
||||
Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
function Assert-Split-File() {
|
||||
if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) {
|
||||
Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
function Upload-Buildkite-Artifact() {
|
||||
param (
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string] $Path,
|
||||
)
|
||||
if (-not (Test-Path $Path)) {
|
||||
Write-Error "Could not find artifact: $Path"
|
||||
exit 1
|
||||
}
|
||||
if ($Split) {
|
||||
Remove-Item -Path "$Path.*" -Force
|
||||
Split-File -Path (Resolve-Path $Path) -PartSizeBytes "50MB" -Verbose
|
||||
$Path = "$Path.*"
|
||||
}
|
||||
& buildkite-agent artifact upload "$Path" --debug --debug-http
|
||||
}
|
||||
|
||||
Assert-Buildkite-Agent
|
||||
if ($Split) {
|
||||
Assert-Split-File
|
||||
}
|
||||
|
||||
foreach ($Path in $Paths) {
|
||||
Upload-Buildkite-Artifact $Path
|
||||
}
|
||||
54
.buildkite/scripts/upload-artifact.sh
Executable file
54
.buildkite/scripts/upload-artifact.sh
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v buildkite-agent &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_split() {
|
||||
if ! command -v split &> /dev/null; then
|
||||
echo "error: Cannot find split, please install it:"
|
||||
echo "https://www.gnu.org/software/coreutils/split"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_buildkite_artifact() {
|
||||
local path="$1"; shift
|
||||
local split="0"
|
||||
local args=()
|
||||
while true; do
|
||||
if [ -z "$1" ]; then
|
||||
break
|
||||
fi
|
||||
case "$1" in
|
||||
--split) split="1"; shift ;;
|
||||
*) args+=("$1"); shift ;;
|
||||
esac
|
||||
done
|
||||
if [ ! -f "$path" ]; then
|
||||
echo "error: Could not find artifact: $path"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$split" == "1" ]; then
|
||||
run_command rm -f "$path."*
|
||||
run_command split -b 50MB -d "$path" "$path."
|
||||
run_command buildkite-agent artifact upload "$path.*" "${args[@]}"
|
||||
else
|
||||
run_command buildkite-agent artifact upload "$path" "${args[@]}"
|
||||
fi
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
assert_buildkite_agent
|
||||
upload_buildkite_artifact "$@"
|
||||
211
.buildkite/scripts/upload-release.sh
Executable file
211
.buildkite/scripts/upload-release.sh
Executable file
@@ -0,0 +1,211 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_main() {
|
||||
if [ -z "$BUILDKITE_REPO" ]; then
|
||||
echo "error: Cannot find repository for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_COMMIT" ]; then
|
||||
echo "error: Cannot find commit for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
|
||||
echo "error: Cannot upload release from a fork"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
|
||||
echo "error: Cannot upload release from a pull request"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$BUILDKITE_BRANCH" != "main" ]; then
|
||||
echo "error: Cannot upload release from a branch other than main"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v "buildkite-agent" &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_github() {
|
||||
assert_command "gh" "gh" "https://github.com/cli/cli#installation"
|
||||
assert_buildkite_secret "GITHUB_TOKEN"
|
||||
# gh expects the token in $GH_TOKEN
|
||||
export GH_TOKEN="$GITHUB_TOKEN"
|
||||
}
|
||||
|
||||
function assert_aws() {
|
||||
assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
|
||||
for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
|
||||
assert_buildkite_secret "$secret"
|
||||
done
|
||||
assert_buildkite_secret "AWS_BUCKET" --skip-redaction
|
||||
}
|
||||
|
||||
function assert_sentry() {
|
||||
assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
|
||||
for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
|
||||
assert_buildkite_secret "$secret"
|
||||
done
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
function assert_command() {
|
||||
local command="$1"
|
||||
local package="$2"
|
||||
local help_url="$3"
|
||||
if ! command -v "$command" &> /dev/null; then
|
||||
echo "warning: $command is not installed, installing..."
|
||||
if command -v brew &> /dev/null; then
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
|
||||
else
|
||||
echo "error: Cannot install $command, please install it"
|
||||
if [ -n "$help_url" ]; then
|
||||
echo ""
|
||||
echo "hint: See $help_url for help"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_secret() {
|
||||
local key="$1"
|
||||
local value=$(buildkite-agent secret get "$key" ${@:2})
|
||||
if [ -z "$value" ]; then
|
||||
echo "error: Cannot find $key secret"
|
||||
echo ""
|
||||
echo "hint: Create a secret named $key with a value:"
|
||||
echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
|
||||
exit 1
|
||||
fi
|
||||
export "$key"="$value"
|
||||
}
|
||||
|
||||
function release_tag() {
|
||||
local version="$1"
|
||||
if [ "$version" == "canary" ]; then
|
||||
echo "canary"
|
||||
else
|
||||
echo "bun-v$version"
|
||||
fi
|
||||
}
|
||||
|
||||
function create_sentry_release() {
|
||||
local version="$1"
|
||||
local release="$version"
|
||||
if [ "$version" == "canary" ]; then
|
||||
release="$BUILDKITE_COMMIT-canary"
|
||||
fi
|
||||
run_command sentry-cli releases new "$release" --finalize
|
||||
run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
|
||||
if [ "$version" == "canary" ]; then
|
||||
run_command sentry-cli deploys new --env="canary" --release="$release"
|
||||
fi
|
||||
}
|
||||
|
||||
function download_buildkite_artifact() {
|
||||
local name="$1"
|
||||
local dir="$2"
|
||||
if [ -z "$dir" ]; then
|
||||
dir="."
|
||||
fi
|
||||
run_command buildkite-agent artifact download "$name" "$dir"
|
||||
if [ ! -f "$dir/$name" ]; then
|
||||
echo "error: Cannot find Buildkite artifact: $name"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_github_asset() {
|
||||
local version="$1"
|
||||
local tag="$(release_tag "$version")"
|
||||
local file="$2"
|
||||
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
|
||||
|
||||
# Sometimes the upload fails, maybe this is a race condition in the gh CLI?
|
||||
while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
|
||||
echo "warn: Uploading $file to $tag failed, retrying..."
|
||||
sleep "$((RANDOM % 5 + 1))"
|
||||
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
|
||||
done
|
||||
}
|
||||
|
||||
function update_github_release() {
|
||||
local version="$1"
|
||||
local tag="$(release_tag "$version")"
|
||||
if [ "$tag" == "canary" ]; then
|
||||
run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
|
||||
--notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_s3_file() {
|
||||
local folder="$1"
|
||||
local file="$2"
|
||||
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
|
||||
}
|
||||
|
||||
function create_release() {
|
||||
assert_main
|
||||
assert_buildkite_agent
|
||||
assert_github
|
||||
assert_aws
|
||||
assert_sentry
|
||||
|
||||
local tag="$1" # 'canary' or 'x.y.z'
|
||||
local artifacts=(
|
||||
bun-darwin-aarch64.zip
|
||||
bun-darwin-aarch64-profile.zip
|
||||
bun-darwin-x64.zip
|
||||
bun-darwin-x64-profile.zip
|
||||
bun-linux-aarch64.zip
|
||||
bun-linux-aarch64-profile.zip
|
||||
bun-linux-x64.zip
|
||||
bun-linux-x64-profile.zip
|
||||
bun-linux-x64-baseline.zip
|
||||
bun-linux-x64-baseline-profile.zip
|
||||
bun-windows-x64.zip
|
||||
bun-windows-x64-profile.zip
|
||||
bun-windows-x64-baseline.zip
|
||||
bun-windows-x64-baseline-profile.zip
|
||||
)
|
||||
|
||||
function upload_artifact() {
|
||||
local artifact="$1"
|
||||
download_buildkite_artifact "$artifact"
|
||||
upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
|
||||
upload_s3_file "releases/$tag" "$artifact" &
|
||||
upload_github_asset "$tag" "$artifact" &
|
||||
}
|
||||
|
||||
for artifact in "${artifacts[@]}"; do
|
||||
upload_artifact "$artifact" &
|
||||
done
|
||||
wait
|
||||
|
||||
update_github_release "$tag"
|
||||
create_sentry_release "$tag"
|
||||
}
|
||||
|
||||
function assert_canary() {
|
||||
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
|
||||
if [ -z "$canary" ] || [ "$canary" == "0" ]; then
|
||||
echo "warn: Skipping release because this is not a canary build"
|
||||
exit 0
|
||||
fi
|
||||
}
|
||||
|
||||
assert_canary
|
||||
create_release "canary"
|
||||
312
.github/workflows/build-darwin.yml
vendored
312
.github/workflows/build-darwin.yml
vendored
@@ -1,312 +0,0 @@
|
||||
name: Build Darwin
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: macos-12-large
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
canary:
|
||||
type: boolean
|
||||
no-cache:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
BUN_VERSION: 1.1.8
|
||||
LC_CTYPE: "en_US.UTF-8"
|
||||
LC_ALL: "en_US.UTF-8"
|
||||
|
||||
jobs:
|
||||
build-submodules:
|
||||
name: Build Submodules
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
.gitmodules
|
||||
src/deps
|
||||
scripts
|
||||
- name: Hash Submodules
|
||||
id: hash
|
||||
run: |
|
||||
print_versions() {
|
||||
git submodule | grep -v WebKit
|
||||
echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
|
||||
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
|
||||
}
|
||||
echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
id: cache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
llvm@${{ env.LLVM_VERSION }} \
|
||||
ccache \
|
||||
rust \
|
||||
pkg-config \
|
||||
coreutils \
|
||||
libtool \
|
||||
cmake \
|
||||
libiconv \
|
||||
automake \
|
||||
openssl@1.1 \
|
||||
ninja \
|
||||
golang \
|
||||
gnu-sed --force --overwrite
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Clone Submodules
|
||||
run: |
|
||||
./scripts/update-submodules.sh
|
||||
- name: Build Submodules
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
./scripts/all-dependencies.sh
|
||||
- name: Save Cache
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
key: ${{ steps.cache.outputs.cache-primary-key }}
|
||||
- name: Upload bun-${{ inputs.tag }}-deps
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
if-no-files-found: error
|
||||
build-cpp:
|
||||
name: Build C++
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
llvm@${{ env.LLVM_VERSION }} \
|
||||
ccache \
|
||||
rust \
|
||||
pkg-config \
|
||||
coreutils \
|
||||
libtool \
|
||||
cmake \
|
||||
libiconv \
|
||||
automake \
|
||||
openssl@1.1 \
|
||||
ninja \
|
||||
golang \
|
||||
gnu-sed --force --overwrite
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Compile
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
SOURCE_DIR: ${{ github.workspace }}
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
|
||||
CCACHE_DIR: ${{ runner.temp }}/ccache
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR
|
||||
cd $OBJ_DIR
|
||||
cmake -S $SOURCE_DIR -B $OBJ_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_LTO=ON \
|
||||
-DBUN_CPP_ONLY=1 \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
chmod +x compile-cpp-only.sh
|
||||
./compile-cpp-only.sh -v
|
||||
- name: Upload bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
|
||||
if-no-files-found: error
|
||||
build-zig:
|
||||
name: Build Zig
|
||||
uses: ./.github/workflows/build-zig.yml
|
||||
with:
|
||||
os: darwin
|
||||
only-zig: true
|
||||
tag: ${{ inputs.tag }}
|
||||
arch: ${{ inputs.arch }}
|
||||
cpu: ${{ inputs.cpu }}
|
||||
assertions: ${{ inputs.assertions }}
|
||||
canary: ${{ inputs.canary }}
|
||||
no-cache: ${{ inputs.no-cache }}
|
||||
link:
|
||||
name: Link
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
needs:
|
||||
- build-submodules
|
||||
- build-cpp
|
||||
- build-zig
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
llvm@${{ env.LLVM_VERSION }} \
|
||||
ccache \
|
||||
rust \
|
||||
pkg-config \
|
||||
coreutils \
|
||||
libtool \
|
||||
cmake \
|
||||
libiconv \
|
||||
automake \
|
||||
openssl@1.1 \
|
||||
ninja \
|
||||
golang \
|
||||
gnu-sed --force --overwrite
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Download bun-${{ inputs.tag }}-deps
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Download bun-${{ inputs.tag }}-zig
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: ${{ runner.temp }}/release
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ${{ runner.temp }}/ccache
|
||||
run: |
|
||||
SRC_DIR=$PWD
|
||||
mkdir ${{ runner.temp }}/link-build
|
||||
cd ${{ runner.temp }}/link-build
|
||||
cmake $SRC_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_LTO=ON \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \
|
||||
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
|
||||
-DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
ninja -v
|
||||
- name: Prepare
|
||||
run: |
|
||||
cd ${{ runner.temp }}/link-build
|
||||
chmod +x bun-profile bun
|
||||
mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
|
||||
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
|
||||
mv bun bun-${{ inputs.tag }}/bun
|
||||
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
|
||||
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
|
||||
- name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: link
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
64
.github/workflows/build-linux.yml
vendored
64
.github/workflows/build-linux.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: Build Linux
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
required: true
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
zig-optimize:
|
||||
type: string
|
||||
canary:
|
||||
type: boolean
|
||||
no-cache:
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Linux
|
||||
uses: ./.github/workflows/build-zig.yml
|
||||
with:
|
||||
os: linux
|
||||
only-zig: false
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
tag: ${{ inputs.tag }}
|
||||
arch: ${{ inputs.arch }}
|
||||
cpu: ${{ inputs.cpu }}
|
||||
assertions: ${{ inputs.assertions }}
|
||||
zig-optimize: ${{ inputs.zig-optimize }}
|
||||
canary: ${{ inputs.canary }}
|
||||
no-cache: ${{ inputs.no-cache }}
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
344
.github/workflows/build-windows.yml
vendored
344
.github/workflows/build-windows.yml
vendored
@@ -1,344 +0,0 @@
|
||||
name: Build Windows
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: windows
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
canary:
|
||||
type: boolean
|
||||
no-cache:
|
||||
type: boolean
|
||||
bun-version:
|
||||
type: string
|
||||
default: 1.1.7
|
||||
|
||||
env:
|
||||
# Must specify exact version of LLVM for Windows
|
||||
LLVM_VERSION: 16.0.6
|
||||
BUN_VERSION: ${{ inputs.bun-version }}
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: 1
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1
|
||||
CI: true
|
||||
|
||||
jobs:
|
||||
build-submodules:
|
||||
name: Build Submodules
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Install VS2022 BuildTools 17.9.7
|
||||
run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
.gitmodules
|
||||
src/deps
|
||||
scripts
|
||||
- name: Hash Submodules
|
||||
id: hash
|
||||
run: |
|
||||
$data = "$(& {
|
||||
git submodule | Where-Object { $_ -notmatch 'WebKit' }
|
||||
echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
|
||||
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
|
||||
echo 1
|
||||
})"
|
||||
$hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
|
||||
echo "hash=${hash}" >> $env:GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
id: cache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: bun-deps
|
||||
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Clone Submodules
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Build Dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
choco install -y nasm --version=2.16.01
|
||||
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
|
||||
.\scripts\all-dependencies.ps1
|
||||
- name: Save Cache
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: bun-deps
|
||||
key: ${{ steps.cache.outputs.cache-primary-key }}
|
||||
- name: Upload bun-${{ inputs.tag }}-deps
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: bun-deps
|
||||
if-no-files-found: error
|
||||
codegen:
|
||||
name: Codegen
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ inputs.bun-version }}
|
||||
- name: Codegen
|
||||
run: |
|
||||
./scripts/cross-compile-codegen.sh win32 x64
|
||||
- if: ${{ inputs.canary }}
|
||||
name: Calculate Revision
|
||||
run: |
|
||||
echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}"
|
||||
bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
|
||||
- name: Upload bun-${{ inputs.tag }}-codegen
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-codegen
|
||||
path: build-codegen-win32-x64
|
||||
if-no-files-found: error
|
||||
build-cpp:
|
||||
name: Build C++
|
||||
needs: codegen
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Install VS2022 BuildTools 17.9.7
|
||||
run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ inputs.bun-version }}
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Download bun-${{ inputs.tag }}-codegen
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-codegen
|
||||
path: build
|
||||
- name: Compile
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\build-libuv.ps1 -CloneOnly $True
|
||||
cd build
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
.\compile-cpp-only.ps1 -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
|
||||
- name: Upload bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: build/bun-cpp-objects.a
|
||||
if-no-files-found: error
|
||||
build-zig:
|
||||
name: Build Zig
|
||||
uses: ./.github/workflows/build-zig.yml
|
||||
with:
|
||||
os: windows
|
||||
zig-optimize: ReleaseSafe
|
||||
only-zig: true
|
||||
tag: ${{ inputs.tag }}
|
||||
arch: ${{ inputs.arch }}
|
||||
cpu: ${{ inputs.cpu }}
|
||||
assertions: ${{ inputs.assertions }}
|
||||
canary: ${{ inputs.canary }}
|
||||
no-cache: ${{ inputs.no-cache }}
|
||||
link:
|
||||
name: Link
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
needs:
|
||||
- build-submodules
|
||||
- build-cpp
|
||||
- build-zig
|
||||
- codegen
|
||||
steps:
|
||||
- name: Install VS2022 BuildTools 17.9.7
|
||||
run: choco install -y visualstudio2022buildtools --version=117.9.7.0 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/180911598_-255012421/channel"
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ inputs.bun-version }}
|
||||
- name: Download bun-${{ inputs.tag }}-deps
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: bun-deps
|
||||
- name: Download bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: bun-cpp
|
||||
- name: Download bun-${{ inputs.tag }}-zig
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: bun-zig
|
||||
- name: Download bun-${{ inputs.tag }}-codegen
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-codegen
|
||||
path: build
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
Set-Location build
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_LINK_ONLY=1 `
|
||||
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
|
||||
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
|
||||
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" `
|
||||
${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
ninja -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
|
||||
- name: Prepare
|
||||
run: |
|
||||
$Dist = mkdir -Force "bun-${{ inputs.tag }}"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
Compress-Archive -Force "$Dist" "${Dist}.zip"
|
||||
$Dist = "$Dist-profile"
|
||||
MkDir -Force "$Dist"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
cp -r build\bun.pdb "$Dist\bun.pdb"
|
||||
Compress-Archive -Force "$Dist" "$Dist.zip"
|
||||
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json
|
||||
- name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-feature-data
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-feature-data
|
||||
path: features.json
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: link
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
122
.github/workflows/build-zig.yml
vendored
122
.github/workflows/build-zig.yml
vendored
@@ -1,122 +0,0 @@
|
||||
name: Build Zig
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }}
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
os:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
default: false
|
||||
zig-optimize:
|
||||
type: string # 'ReleaseSafe' or 'ReleaseFast'
|
||||
default: ReleaseFast
|
||||
canary:
|
||||
type: boolean
|
||||
default: ${{ github.ref == 'refs/heads/main' }}
|
||||
only-zig:
|
||||
type: boolean
|
||||
default: true
|
||||
no-cache:
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
jobs:
|
||||
build-zig:
|
||||
name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }}
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Calculate Cache Key
|
||||
id: cache
|
||||
run: |
|
||||
echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}" >> $GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-docker-
|
||||
path: |
|
||||
${{ runner.temp }}/dockercache
|
||||
- name: Setup Docker
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
install: true
|
||||
platforms: |
|
||||
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
- name: Build
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: false
|
||||
target: ${{ inputs.only-zig && 'build_release_obj' || 'artifact' }}
|
||||
cache-from: |
|
||||
type=local,src=${{ runner.temp }}/dockercache
|
||||
cache-to: |
|
||||
type=local,dest=${{ runner.temp }}/dockercache,mode=max
|
||||
outputs: |
|
||||
type=local,dest=${{ runner.temp }}/release
|
||||
platforms: |
|
||||
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }}
|
||||
ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
|
||||
BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
|
||||
CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }}
|
||||
ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }}
|
||||
ZIG_OPTIMIZE=${{ inputs.zig-optimize }}
|
||||
CANARY=${{ inputs.canary && '1' || '0' }}
|
||||
- if: ${{ inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}-zig
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: ${{ runner.temp }}/release/bun-zig.o
|
||||
if-no-files-found: error
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Prepare
|
||||
run: |
|
||||
cd ${{ runner.temp }}/release
|
||||
chmod +x bun-profile bun
|
||||
mkdir bun-${{ inputs.tag }}-profile
|
||||
mkdir bun-${{ inputs.tag }}
|
||||
strip bun
|
||||
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
|
||||
mv bun bun-${{ inputs.tag }}/bun
|
||||
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
|
||||
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
245
.github/workflows/ci.yml
vendored
245
.github/workflows/ci.yml
vendored
@@ -1,245 +0,0 @@
|
||||
name: CI
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run-id:
|
||||
type: string
|
||||
description: The workflow ID to download artifacts (skips the build step)
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
|
||||
jobs:
|
||||
format:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Format
|
||||
uses: ./.github/workflows/run-format.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
zig-version: 0.13.0
|
||||
permissions:
|
||||
contents: write
|
||||
lint:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Lint
|
||||
uses: ./.github/workflows/run-lint.yml
|
||||
secrets: inherit
|
||||
linux-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build linux-x64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
no-cache: true
|
||||
linux-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build linux-x64-baseline
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
no-cache: true
|
||||
linux-aarch64:
|
||||
if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }}
|
||||
name: Build linux-aarch64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: true
|
||||
no-cache: true
|
||||
darwin-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-x64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
darwin-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-x64-baseline
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
darwin-aarch64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-aarch64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: true
|
||||
windows-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build windows-x64
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
windows-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build windows-x64-baseline
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
linux-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test linux-x64
|
||||
needs: linux-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
linux-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test linux-x64-baseline
|
||||
needs: linux-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
linux-aarch64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}}
|
||||
name: Test linux-aarch64
|
||||
needs: linux-aarch64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
darwin-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-x64
|
||||
needs: darwin-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64
|
||||
darwin-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-x64-baseline
|
||||
needs: darwin-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64-baseline
|
||||
darwin-aarch64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-aarch64
|
||||
needs: darwin-aarch64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
tag: darwin-aarch64
|
||||
windows-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test windows-x64
|
||||
needs: windows-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
windows-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
name: Test windows-x64-baseline
|
||||
needs: windows-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
cleanup:
|
||||
if: ${{ always() }}
|
||||
name: Cleanup
|
||||
needs:
|
||||
- linux-x64
|
||||
- linux-x64-baseline
|
||||
- linux-aarch64
|
||||
- darwin-x64
|
||||
- darwin-x64-baseline
|
||||
- darwin-aarch64
|
||||
- windows-x64
|
||||
- windows-x64-baseline
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Cleanup Artifacts
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: |
|
||||
bun-*-cpp
|
||||
bun-*-zig
|
||||
bun-*-deps
|
||||
bun-*-codegen
|
||||
55
.github/workflows/comment.yml
vendored
55
.github/workflows/comment.yml
vendored
@@ -1,55 +0,0 @@
|
||||
name: Comment
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Tests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun
|
||||
pattern: bun-*-tests
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Setup Environment
|
||||
id: env
|
||||
shell: bash
|
||||
run: |
|
||||
echo "pr-number=$(<bun/bun-linux-x64-tests/pr-number.txt)" >> $GITHUB_OUTPUT
|
||||
- name: Generate Comment
|
||||
run: |
|
||||
cat bun/bun-*-tests/comment.md > comment.md
|
||||
if [ -s comment.md ]; then
|
||||
echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md
|
||||
else
|
||||
echo -e "✅ @${{ github.actor }}, all tests passed!" > comment.md
|
||||
fi
|
||||
echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md
|
||||
echo -e "<!-- generated-comment workflow=${{ github.workflow }} -->" >> comment.md
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment workflow=${{ github.workflow }} -->
|
||||
- name: Write Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body-path: comment.md
|
||||
edit-mode: replace
|
||||
183
.github/workflows/create-release-build.yml
vendored
183
.github/workflows/create-release-build.yml
vendored
@@ -1,183 +0,0 @@
|
||||
name: Create Release Build
|
||||
run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }}
|
||||
|
||||
concurrency:
|
||||
group: release
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
type: string
|
||||
required: true
|
||||
description: "Release version. Example: 1.1.4. Exclude the 'v' prefix."
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
description: "GitHub tag to use"
|
||||
clobber:
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
description: "Overwrite existing release artifacts?"
|
||||
release:
|
||||
types:
|
||||
- created
|
||||
|
||||
jobs:
|
||||
notify-start:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Notify Start
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "Bun v${{ inputs.version }} is compiling"
|
||||
description: |
|
||||
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "Bun v${{ inputs.version }} is compiling"
|
||||
description: |
|
||||
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
linux-x64:
|
||||
name: Build linux-x64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
linux-x64-baseline:
|
||||
name: Build linux-x64-baseline
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
linux-aarch64:
|
||||
name: Build linux-aarch64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: false
|
||||
darwin-x64:
|
||||
name: Build darwin-x64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
darwin-x64-baseline:
|
||||
name: Build darwin-x64-baseline
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
darwin-aarch64:
|
||||
name: Build darwin-aarch64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: false
|
||||
windows-x64:
|
||||
name: Build windows-x64
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
windows-x64-baseline:
|
||||
name: Build windows-x64-baseline
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
|
||||
upload-artifacts:
|
||||
needs:
|
||||
- linux-x64
|
||||
- linux-x64-baseline
|
||||
- linux-aarch64
|
||||
- darwin-x64
|
||||
- darwin-x64-baseline
|
||||
- darwin-aarch64
|
||||
- windows-x64
|
||||
- windows-x64-baseline
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun-releases
|
||||
pattern: bun-*
|
||||
merge-multiple: true
|
||||
github-token: ${{ github.token }}
|
||||
- name: Check for Artifacts
|
||||
run: |
|
||||
if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then
|
||||
echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist."
|
||||
exit 1 # Fail the job if the condition is met
|
||||
else
|
||||
echo "Artifacts downloaded successfully."
|
||||
fi
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: "Bun v${{ inputs.version }} release artifacts uploaded"
|
||||
- name: "Upload Artifacts"
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
# Unzip one level deep each artifact
|
||||
cd bun-releases
|
||||
for f in *.zip; do
|
||||
unzip -o $f
|
||||
done
|
||||
cd ..
|
||||
gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip
|
||||
9
.github/workflows/lint-cpp.yml
vendored
9
.github/workflows/lint-cpp.yml
vendored
@@ -14,10 +14,11 @@ on:
|
||||
type: string
|
||||
description: The workflow ID to download artifacts (skips the build step)
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
paths:
|
||||
- ".github/workflows/lint-cpp.yml"
|
||||
- "**/*.cpp"
|
||||
- "src/deps/**/*"
|
||||
- "CMakeLists.txt"
|
||||
|
||||
jobs:
|
||||
lint-cpp:
|
||||
|
||||
89
.github/workflows/on-submodule-update.yml
vendored
Normal file
89
.github/workflows/on-submodule-update.yml
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
29
.github/workflows/release.yml
vendored
29
.github/workflows/release.yml
vendored
@@ -1,3 +1,6 @@
|
||||
# TODO: Move this to bash scripts intead of Github Actions
|
||||
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
|
||||
|
||||
name: Release
|
||||
concurrency: release
|
||||
|
||||
@@ -63,7 +66,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Sign Release
|
||||
@@ -88,7 +91,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -117,7 +120,7 @@ jobs:
|
||||
if: ${{ env.BUN_VERSION != 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
@@ -259,7 +262,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -270,6 +273,24 @@ jobs:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
|
||||
notify-sentry:
|
||||
name: Notify Sentry
|
||||
runs-on: ubuntu-latest
|
||||
needs: s3
|
||||
steps:
|
||||
- name: Notify Sentry
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
environment: production
|
||||
|
||||
bump:
|
||||
name: "Bump version"
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
4
.github/workflows/run-format.yml
vendored
4
.github/workflows/run-format.yml
vendored
@@ -29,9 +29,9 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.8"
|
||||
bun-version: "1.1.20"
|
||||
- name: Setup Zig
|
||||
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
|
||||
uses: mlugg/setup-zig@v1
|
||||
with:
|
||||
version: ${{ inputs.zig-version }}
|
||||
- name: Install Dependencies
|
||||
|
||||
2
.github/workflows/run-lint-cpp.yml
vendored
2
.github/workflows/run-lint-cpp.yml
vendored
@@ -17,7 +17,7 @@ on:
|
||||
jobs:
|
||||
lint-cpp:
|
||||
name: Lint C++
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
224
.github/workflows/run-test.yml
vendored
224
.github/workflows/run-test.yml
vendored
@@ -1,224 +0,0 @@
|
||||
name: Test
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
required: true
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
pr-number:
|
||||
type: string
|
||||
required: true
|
||||
run-id:
|
||||
type: string
|
||||
default: ${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Tests
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
package.json
|
||||
bun.lockb
|
||||
test
|
||||
packages/bun-internal-test
|
||||
packages/bun-types
|
||||
- name: Setup Environment
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${{ inputs.pr-number }}" > pr-number.txt
|
||||
- name: Download Bun
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ inputs.run-id || github.run_id }}
|
||||
- name: Download pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 8
|
||||
- if: ${{ runner.os != 'Windows' }}
|
||||
name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $GITHUB_PATH
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Cygwin
|
||||
uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Bun (Windows)
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install Dependencies
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
bun install
|
||||
- name: Install Dependencies (test)
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
bun install --cwd test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Install Dependencies (runner)
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
bun install --cwd packages/bun-internal-test
|
||||
- name: Run Tests
|
||||
id: test
|
||||
timeout-minutes: 90
|
||||
shell: bash
|
||||
env:
|
||||
IS_BUN_CI: 1
|
||||
TMPDIR: ${{ runner.temp }}
|
||||
BUN_TAG: ${{ inputs.tag }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
|
||||
TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }}
|
||||
SHELLOPTS: igncr
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs $(which bun)
|
||||
- if: ${{ always() }}
|
||||
name: Upload Results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-tests
|
||||
path: |
|
||||
test-report.*
|
||||
comment.md
|
||||
pr-number.txt
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
- if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }}
|
||||
name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}.
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
- name: Fail
|
||||
if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }}
|
||||
run: |
|
||||
echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}."
|
||||
exit 1
|
||||
test-node:
|
||||
name: Node.js Tests
|
||||
# TODO: enable when we start paying attention to the results. In the meantime, this causes CI to queue jobs wasting developer time.
|
||||
if: 0
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
test/node.js
|
||||
- name: Setup Environment
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${{ inputs.pr-number }}" > pr-number.txt
|
||||
- name: Download Bun
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ inputs.run-id || github.run_id }}
|
||||
- if: ${{ runner.os != 'Windows' }}
|
||||
name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $GITHUB_PATH
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Cygwin
|
||||
uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Bun (Windows)
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Checkout Tests
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
run: |
|
||||
node runner.mjs --pull
|
||||
- name: Install Dependencies
|
||||
timeout-minutes: 5
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
run: |
|
||||
bun install
|
||||
- name: Run Tests
|
||||
timeout-minutes: 10 # Increase when more tests are added
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
env:
|
||||
TMPDIR: ${{ runner.temp }}
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: "0"
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
run: |
|
||||
node runner.mjs
|
||||
- name: Upload Results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-node-tests
|
||||
path: |
|
||||
test/node.js/summary/*.json
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
82
.github/workflows/upload.yml
vendored
82
.github/workflows/upload.yml
vendored
@@ -1,82 +0,0 @@
|
||||
name: Upload Artifacts
|
||||
run-name: Canary release ${{github.sha}} upload
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Upload Artifacts
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun
|
||||
pattern: bun-*
|
||||
merge-multiple: true
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Check for Artifacts
|
||||
run: |
|
||||
if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then
|
||||
echo "Error: No artifacts were downloaded or 'bun' directory does not exist."
|
||||
exit 1 # Fail the job if the condition is met
|
||||
else
|
||||
echo "Artifacts downloaded successfully."
|
||||
fi
|
||||
- name: Upload to GitHub Releases
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
tag: canary
|
||||
name: Canary (${{ github.sha }})
|
||||
prerelease: true
|
||||
body: This canary release of Bun corresponds to the commit [${{ github.sha }}]
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
artifacts: bun/**/bun-*.zip
|
||||
token: ${{ github.token }}
|
||||
- name: Upload to S3 (using SHA)
|
||||
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
|
||||
with:
|
||||
endpoint: ${{ secrets.AWS_ENDPOINT }}
|
||||
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
aws_bucket: ${{ secrets.AWS_BUCKET }}
|
||||
source_dir: bun
|
||||
destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary
|
||||
- name: Upload to S3 (using tag)
|
||||
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
|
||||
with:
|
||||
endpoint: ${{ secrets.AWS_ENDPOINT }}
|
||||
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
aws_bucket: ${{ secrets.AWS_BUCKET }}
|
||||
source_dir: bun
|
||||
destination_dir: releases/canary
|
||||
- name: Announce on Discord
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "New Bun Canary available"
|
||||
url: https://github.com/oven-sh/bun/commit/${{ github.sha }}
|
||||
description: |
|
||||
A new canary build of Bun has been automatically uploaded. To upgrade, run:
|
||||
```sh
|
||||
bun upgrade --canary
|
||||
# bun upgrade --stable <- to downgrade
|
||||
```
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -145,3 +145,4 @@ zig-cache
|
||||
zig-out
|
||||
test/node.js/upstream
|
||||
.zig-cache
|
||||
scripts/env.local
|
||||
|
||||
4
.gitmodules
vendored
4
.gitmodules
vendored
@@ -82,3 +82,7 @@ url = https://github.com/oven-sh/zig
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/libdeflate"]
|
||||
path = src/deps/libdeflate
|
||||
url = https://github.com/ebiggers/libdeflate
|
||||
ignore = "dirty"
|
||||
|
||||
4
.lldbinit
Normal file
4
.lldbinit
Normal file
@@ -0,0 +1,4 @@
|
||||
command script import src/deps/zig/tools/lldb_pretty_printers.py
|
||||
command script import src/bun.js/WebKit/Tools/lldb/lldb_webkit.py
|
||||
|
||||
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
|
||||
9
.vscode/launch.json
generated
vendored
9
.vscode/launch.json
generated
vendored
@@ -17,6 +17,7 @@
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
@@ -445,8 +446,8 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [*] (ci)",
|
||||
"program": "node",
|
||||
"args": ["src/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}/packages/bun-internal-test",
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
},
|
||||
// Windows: bun test [file]
|
||||
@@ -1093,8 +1094,8 @@
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [*] (ci)",
|
||||
"program": "node",
|
||||
"args": ["src/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}/packages/bun-internal-test",
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
},
|
||||
],
|
||||
|
||||
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@@ -27,7 +27,7 @@
|
||||
// Zig
|
||||
"zig.initialSetupDone": true,
|
||||
"zig.buildOption": "build",
|
||||
"zig.zls.zigLibPath": "src/deps/zig/lib",
|
||||
"zig.zls.zigLibPath": "${workspaceFolder}/src/deps/zig/lib",
|
||||
"zig.buildArgs": ["-Dgenerated-code=./build/codegen"],
|
||||
"zig.zls.buildOnSaveStep": "check",
|
||||
// "zig.zls.enableBuildOnSave": true,
|
||||
@@ -42,8 +42,11 @@
|
||||
"editor.defaultFormatter": "ziglang.vscode-zig",
|
||||
},
|
||||
|
||||
// C++
|
||||
// lldb
|
||||
"lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
|
||||
"lldb.verboseLogging": false,
|
||||
|
||||
// C++
|
||||
"cmake.configureOnOpen": false,
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"[cpp]": {
|
||||
|
||||
209
CMakeLists.txt
209
CMakeLists.txt
@@ -2,8 +2,9 @@ cmake_minimum_required(VERSION 3.22)
|
||||
cmake_policy(SET CMP0091 NEW)
|
||||
cmake_policy(SET CMP0067 NEW)
|
||||
|
||||
set(Bun_VERSION "1.1.17")
|
||||
set(WEBKIT_TAG 5bbfe7e880090b9d0b5feaf3563e85957dd7b10d)
|
||||
set(CMAKE_POLICY_DEFAULT_CMP0069 NEW)
|
||||
set(Bun_VERSION "1.1.22")
|
||||
set(WEBKIT_TAG f9a0fda2d2b2fd001a00bfcf8e7917a56b382516)
|
||||
|
||||
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
|
||||
@@ -14,7 +15,6 @@ set(CMAKE_C_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_C_STANDARD_REQUIRED ON)
|
||||
|
||||
# Should not start with v
|
||||
# Used in process.version, process.versions.node, napi, and elsewhere
|
||||
set(REPORTED_NODEJS_VERSION "22.3.0")
|
||||
|
||||
@@ -22,6 +22,7 @@ set(REPORTED_NODEJS_VERSION "22.3.0")
|
||||
# If we do not set this, it will crash at startup on the first memory allocation.
|
||||
if(NOT WIN32 AND NOT APPLE)
|
||||
set(CMAKE_CXX_EXTENSIONS ON)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE FALSE)
|
||||
endif()
|
||||
|
||||
# --- Build Type ---
|
||||
@@ -38,6 +39,13 @@ else()
|
||||
message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}")
|
||||
endif()
|
||||
|
||||
if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX)
|
||||
# workaround until cmake fix is shipped https://github.com/ninja-build/ninja/issues/2280
|
||||
# './build/.ninja_deps' may need to be deleted, the bug is "Note: including file: ..." is saved
|
||||
# as part of some file paths
|
||||
set(CMAKE_CL_SHOWINCLUDES_PREFIX "Note: including file:")
|
||||
endif()
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
set(DEBUG ON)
|
||||
set(DEFAULT_ZIG_OPTIMIZE "Debug")
|
||||
@@ -50,11 +58,8 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
|
||||
|
||||
if(WIN32)
|
||||
# lld-link will strip it for you, so we can build directly to bun.exe
|
||||
# Debug symbols are in a separate file: bun.pdb
|
||||
set(bun "bun")
|
||||
|
||||
# TODO(@paperdave): Remove this
|
||||
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
|
||||
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
|
||||
else()
|
||||
if(ZIG_OPTIMIZE STREQUAL "Debug")
|
||||
@@ -67,7 +72,7 @@ endif()
|
||||
|
||||
# --- MacOS SDK ---
|
||||
if(APPLE AND DEFINED ENV{CI})
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0")
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0")
|
||||
endif()
|
||||
|
||||
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
|
||||
@@ -111,7 +116,11 @@ endif()
|
||||
# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match.
|
||||
#
|
||||
# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid.
|
||||
set(LLVM_VERSION 16)
|
||||
if(WIN32 OR APPLE)
|
||||
set(LLVM_VERSION 18)
|
||||
else()
|
||||
set(LLVM_VERSION 16)
|
||||
endif()
|
||||
|
||||
macro(BUN_FIND_LLVM)
|
||||
find_program(
|
||||
@@ -145,11 +154,12 @@ macro(BUN_FIND_LLVM)
|
||||
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
|
||||
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
|
||||
)
|
||||
|
||||
find_program(
|
||||
STRIP
|
||||
NAMES strip
|
||||
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
|
||||
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
|
||||
DOC "Path to strip binary"
|
||||
)
|
||||
find_program(
|
||||
DSYMUTIL
|
||||
@@ -300,6 +310,7 @@ endif()
|
||||
# -- Build Flags --
|
||||
option(USE_STATIC_SQLITE "Statically link SQLite?" ${DEFAULT_ON_UNLESS_APPLE})
|
||||
option(USE_CUSTOM_ZLIB "Use Bun's recommended version of zlib" ON)
|
||||
option(USE_CUSTOM_LIBDEFLATE "Use Bun's recommended version of libdeflate" ON)
|
||||
option(USE_CUSTOM_BORINGSSL "Use Bun's recommended version of BoringSSL" ON)
|
||||
option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON)
|
||||
option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON)
|
||||
@@ -321,6 +332,16 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of
|
||||
|
||||
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
|
||||
|
||||
if(APPLE AND USE_LTO)
|
||||
set(USE_LTO OFF)
|
||||
message(WARNING "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
|
||||
endif()
|
||||
|
||||
if(WIN32 AND USE_LTO)
|
||||
set(CMAKE_LINKER_TYPE LLD)
|
||||
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF)
|
||||
endif()
|
||||
|
||||
option(BUN_TIDY_ONLY "Only run clang-tidy" OFF)
|
||||
option(BUN_TIDY_ONLY_EXTRA " Only run clang-tidy, with extra checks for local development" OFF)
|
||||
|
||||
@@ -614,7 +635,7 @@ set(BUN_DEPS_DIR "${BUN_SRC}/deps")
|
||||
set(BUN_CODEGEN_SRC "${BUN_SRC}/codegen")
|
||||
|
||||
if(NOT BUN_DEPS_OUT_DIR)
|
||||
set(BUN_DEPS_OUT_DIR "${BUN_DEPS_DIR}")
|
||||
set(BUN_DEPS_OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/build/bun-deps")
|
||||
endif()
|
||||
|
||||
set(BUN_RAW_SOURCES, "")
|
||||
@@ -632,16 +653,6 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})
|
||||
|
||||
# -- Brotli --
|
||||
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
|
||||
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
|
||||
"${BROTLI_SRC}/common/*.c"
|
||||
"${BROTLI_SRC}/enc/*.c"
|
||||
"${BROTLI_SRC}/dec/*.c"
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES})
|
||||
include_directories("${BUN_DEPS_DIR}/brotli/include")
|
||||
|
||||
# -- uSockets --
|
||||
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
|
||||
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
|
||||
@@ -772,6 +783,8 @@ if(NOT NO_CODEGEN)
|
||||
"${BUN_SRC}/js/thirdparty/*.ts"
|
||||
"${BUN_SRC}/js/internal/*.js"
|
||||
"${BUN_SRC}/js/internal/*.ts"
|
||||
"${BUN_SRC}/js/internal/util/*.js"
|
||||
"${BUN_SRC}/js/internal/fs/*.ts"
|
||||
"${BUN_SRC}/js/node/*.js"
|
||||
"${BUN_SRC}/js/node/*.ts"
|
||||
"${BUN_SRC}/js/thirdparty/*.js"
|
||||
@@ -855,13 +868,24 @@ file(GLOB ZIG_FILES
|
||||
"${BUN_SRC}/*/*/*/*/*.zig"
|
||||
)
|
||||
|
||||
if(NOT BUN_ZIG_OBJ_FORMAT)
|
||||
# To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
|
||||
# LLVM 18.1.7 does not compatible with what bitcode Zig 0.13 outputs (has LLVM 18.1.7)
|
||||
# Change to "bc" to experiment, "Invalid record" means it is not valid output.
|
||||
set(BUN_ZIG_OBJ_FORMAT "obj")
|
||||
endif()
|
||||
|
||||
if(NOT BUN_ZIG_OBJ_DIR)
|
||||
set(BUN_ZIG_OBJ_DIR "${BUN_WORKDIR}/CMakeFiles")
|
||||
endif()
|
||||
|
||||
get_filename_component(BUN_ZIG_OBJ_DIR "${BUN_ZIG_OBJ_DIR}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}")
|
||||
|
||||
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
|
||||
if(WIN32)
|
||||
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
|
||||
else()
|
||||
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
|
||||
endif()
|
||||
|
||||
set(USES_TERMINAL_NOT_IN_CI "")
|
||||
|
||||
@@ -876,6 +900,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
"${ZIG_COMPILER}" "build" "obj"
|
||||
"--zig-lib-dir" "${ZIG_LIB_DIR}"
|
||||
"--prefix" "${BUN_ZIG_OBJ_DIR}"
|
||||
"--verbose"
|
||||
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
|
||||
"-freference-trace=10"
|
||||
"-Dversion=${Bun_VERSION}"
|
||||
@@ -885,6 +910,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
"-Dtarget=${ZIG_TARGET}"
|
||||
"-Denable_logs=${ENABLE_LOGS}"
|
||||
"-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}"
|
||||
"-Dobj_format=${BUN_ZIG_OBJ_FORMAT}"
|
||||
DEPENDS
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
|
||||
"${ZIG_FILES}"
|
||||
@@ -948,12 +974,15 @@ set_target_properties(${bun} PROPERTIES
|
||||
VISIBILITY_INLINES_HIDDEN YES
|
||||
)
|
||||
|
||||
if(APPLE)
|
||||
add_compile_definitions("__DARWIN_NON_CANCELABLE=1")
|
||||
endif()
|
||||
|
||||
add_compile_definitions(
|
||||
|
||||
# TODO: are all of these variables strictly necessary?
|
||||
"_HAS_EXCEPTIONS=0"
|
||||
"LIBUS_USE_OPENSSL=1"
|
||||
"UWS_HTTPRESPONSE_NO_WRITEMARK=1"
|
||||
"LIBUS_USE_BORINGSSL=1"
|
||||
"WITH_BORINGSSL=1"
|
||||
"STATICALLY_LINKED_WITH_JavaScriptCore=1"
|
||||
@@ -971,8 +1000,20 @@ add_compile_definitions(
|
||||
)
|
||||
|
||||
if(NOT ASSERT_ENABLED)
|
||||
if(APPLE)
|
||||
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0")
|
||||
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE")
|
||||
endif()
|
||||
|
||||
add_compile_definitions("NDEBUG=1")
|
||||
else()
|
||||
if(APPLE)
|
||||
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1")
|
||||
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG")
|
||||
elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||
add_compile_definitions("_GLIBCXX_ASSERTIONS=1")
|
||||
endif()
|
||||
|
||||
add_compile_definitions("ASSERT_ENABLED=1")
|
||||
endif()
|
||||
|
||||
@@ -1058,7 +1099,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
if(NOT WIN32)
|
||||
if(USE_LTO)
|
||||
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
|
||||
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables")
|
||||
endif()
|
||||
|
||||
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
|
||||
@@ -1077,13 +1118,38 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
set(LTO_LINK_FLAG "")
|
||||
|
||||
if(USE_LTO)
|
||||
# -emit-llvm seems to not be supported or under a different name on Windows.
|
||||
target_compile_options(${bun} PUBLIC -Xclang -emit-llvm-bc)
|
||||
|
||||
list(APPEND LTO_FLAG "-flto=full")
|
||||
list(APPEND LTO_LINK_FLAG "-flto=full")
|
||||
list(APPEND LTO_LINK_FLAG "/LTCG")
|
||||
list(APPEND LTO_LINK_FLAG "/OPT:REF")
|
||||
list(APPEND LTO_LINK_FLAG "/OPT:NOICF")
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG})
|
||||
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG:FULL)
|
||||
target_compile_options(${bun} PUBLIC
|
||||
/O2
|
||||
${LTO_FLAG}
|
||||
/Gy
|
||||
/Gw
|
||||
/GF
|
||||
/GA
|
||||
)
|
||||
target_link_options(${bun} PUBLIC
|
||||
${LTO_LINK_FLAG}
|
||||
/DEBUG:FULL
|
||||
|
||||
/delayload:ole32.dll
|
||||
/delayload:WINMM.dll
|
||||
/delayload:dbghelp.dll
|
||||
/delayload:VCRUNTIME140_1.dll
|
||||
|
||||
# libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them
|
||||
/delayload:WS2_32.dll
|
||||
/delayload:WSOCK32.dll
|
||||
/delayload:ADVAPI32.dll
|
||||
/delayload:IPHLPAPI.dll
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
@@ -1101,6 +1167,11 @@ else()
|
||||
# On arm macOS, we can set it to a minimum of the M1 cpu set. this might be the default already.
|
||||
target_compile_options(${bun} PUBLIC "-mcpu=apple-m1")
|
||||
endif()
|
||||
|
||||
if(NOT WIN32 AND NOT APPLE AND ARCH STREQUAL "aarch64")
|
||||
# on arm64 linux, we set a minimum of armv8
|
||||
target_compile_options(${bun} PUBLIC -march=armv8-a+crc -mtune=ampere1)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC -ferror-limit=${ERROR_LIMIT})
|
||||
@@ -1114,23 +1185,29 @@ if(WIN32)
|
||||
"BORINGSSL_NO_CXX=1" # lol
|
||||
)
|
||||
|
||||
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
|
||||
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
|
||||
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
|
||||
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
|
||||
|
||||
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-")
|
||||
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def")
|
||||
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors)
|
||||
|
||||
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0")
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fPIC
|
||||
-mtune=${CPU_TARGET}
|
||||
-fconstexpr-steps=2542484
|
||||
-fconstexpr-depth=54
|
||||
-fno-exceptions
|
||||
-fno-asynchronous-unwind-tables
|
||||
-fno-unwind-tables
|
||||
-fno-c++-static-destructors
|
||||
-fvisibility=hidden
|
||||
-fvisibility-inlines-hidden
|
||||
-fno-rtti
|
||||
-fno-omit-frame-pointer
|
||||
-mno-omit-leaf-frame-pointer
|
||||
-fno-pic
|
||||
-fno-pie
|
||||
-faddrsig
|
||||
)
|
||||
endif()
|
||||
|
||||
@@ -1140,17 +1217,18 @@ if(APPLE)
|
||||
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
|
||||
target_link_options(${bun} PUBLIC "-exported_symbols_list" "${BUN_SRC}/symbols.txt")
|
||||
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.txt")
|
||||
|
||||
target_link_options(${bun} PUBLIC "-fno-keep-static-consts")
|
||||
target_link_libraries(${bun} PRIVATE "resolv")
|
||||
endif()
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
target_link_options(${bun} PUBLIC
|
||||
"-fuse-ld=lld"
|
||||
"-static-libstdc++"
|
||||
"-static-libgcc"
|
||||
"-Wl,-z,now"
|
||||
-fuse-ld=lld-${LLVM_VERSION}
|
||||
-fno-pic
|
||||
-static-libstdc++
|
||||
-static-libgcc
|
||||
"-Wl,-no-pie"
|
||||
"-Wl,-icf=safe"
|
||||
"-Wl,--as-needed"
|
||||
"-Wl,--gc-sections"
|
||||
"-Wl,-z,stack-size=12800000"
|
||||
@@ -1179,6 +1257,8 @@ if(UNIX AND NOT APPLE)
|
||||
"-rdynamic"
|
||||
"-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn"
|
||||
"-Wl,--version-script=${BUN_SRC}/linker.lds"
|
||||
-Wl,-z,lazy
|
||||
-Wl,-z,norelro
|
||||
)
|
||||
|
||||
target_link_libraries(${bun} PRIVATE "c")
|
||||
@@ -1212,12 +1292,16 @@ endif()
|
||||
|
||||
# --- Stripped Binary "bun"
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED)
|
||||
# add_custom_command(
|
||||
# TARGET ${bun}
|
||||
# POST_BUILD
|
||||
# COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/bun.dSYM ${BUN_WORKDIR}/${bun}
|
||||
# COMMENT "Stripping Symbols"
|
||||
# )
|
||||
# if(CI AND APPLE)
|
||||
if(APPLE)
|
||||
add_custom_command(
|
||||
TARGET ${bun}
|
||||
POST_BUILD
|
||||
COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun}
|
||||
COMMENT "Generating .dSYM"
|
||||
)
|
||||
endif()
|
||||
|
||||
add_custom_command(
|
||||
TARGET ${bun}
|
||||
POST_BUILD
|
||||
@@ -1287,6 +1371,19 @@ else()
|
||||
target_link_libraries(${bun} PRIVATE LibArchive::LibArchive)
|
||||
endif()
|
||||
|
||||
if(USE_CUSTOM_LIBDEFLATE)
|
||||
include_directories(${BUN_DEPS_DIR}/libdeflate)
|
||||
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/deflate.lib")
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libdeflate.a")
|
||||
endif()
|
||||
else()
|
||||
find_package(LibDeflate REQUIRED)
|
||||
target_link_libraries(${bun} PRIVATE LibDeflate::LibDeflate)
|
||||
endif()
|
||||
|
||||
if(USE_CUSTOM_MIMALLOC)
|
||||
include_directories(${BUN_DEPS_DIR}/mimalloc/include)
|
||||
|
||||
@@ -1385,6 +1482,11 @@ if(USE_STATIC_SQLITE)
|
||||
"SQLITE_ENABLE_JSON1=1"
|
||||
"SQLITE_ENABLE_MATH_FUNCTIONS=1"
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
target_compile_options(sqlite3 PRIVATE /MT /U_DLL)
|
||||
endif()
|
||||
|
||||
target_link_libraries(${bun} PRIVATE sqlite3)
|
||||
message(STATUS "Using static sqlite3")
|
||||
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0")
|
||||
@@ -1393,6 +1495,24 @@ else()
|
||||
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
|
||||
endif()
|
||||
|
||||
# -- Brotli --
|
||||
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
|
||||
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
|
||||
"${BROTLI_SRC}/common/*.c"
|
||||
"${BROTLI_SRC}/enc/*.c"
|
||||
"${BROTLI_SRC}/dec/*.c"
|
||||
)
|
||||
add_library(brotli STATIC ${BROTLI_FILES})
|
||||
target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include")
|
||||
target_compile_definitions(brotli PRIVATE "BROTLI_STATIC")
|
||||
|
||||
if(WIN32)
|
||||
target_compile_options(brotli PRIVATE /MT /U_DLL)
|
||||
endif()
|
||||
|
||||
target_link_libraries(${bun} PRIVATE brotli)
|
||||
include_directories("${BUN_DEPS_DIR}/brotli/include")
|
||||
|
||||
if(USE_CUSTOM_LSHPACK)
|
||||
include_directories(${BUN_DEPS_DIR}/ls-hpack)
|
||||
|
||||
@@ -1412,7 +1532,6 @@ if(NOT WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
|
||||
else()
|
||||
target_link_options(${bun} PRIVATE "-static")
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
"${WEBKIT_LIB_DIR}/WTF.lib"
|
||||
"${WEBKIT_LIB_DIR}/JavaScriptCore.lib"
|
||||
@@ -1422,10 +1541,10 @@ else()
|
||||
winmm
|
||||
bcrypt
|
||||
ntdll
|
||||
ucrt
|
||||
userenv
|
||||
dbghelp
|
||||
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
|
||||
delayimp.lib
|
||||
)
|
||||
endif()
|
||||
|
||||
|
||||
@@ -2,6 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o
|
||||
|
||||
If you are using Windows, please refer to [this guide](/docs/project/building-windows)
|
||||
|
||||
{% details summary="For Ubuntu users" %}
|
||||
TL;DR: Ubuntu 22.04 is suggested.
|
||||
Bun currently requires `glibc >=2.32` in development which means if you're on Ubuntu 20.04 (glibc == 2.31), you may likely meet `error: undefined symbol: __libc_single_threaded `. You need to take extra configurations. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble). And instead, you might want `brew` to install LLVM 16 for your Ubuntu 24.04.
|
||||
{% /details %}
|
||||
|
||||
## Install Dependencies
|
||||
|
||||
Using your system's package manager, install Bun's dependencies:
|
||||
@@ -107,7 +112,7 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin"
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
> ⚠️ Ubuntu distributions may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
|
||||
> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
|
||||
|
||||
## Building Bun
|
||||
|
||||
@@ -311,3 +316,12 @@ $ bun setup -DUSE_STATIC_LIBATOMIC=OFF
|
||||
```
|
||||
|
||||
The built version of Bun may not work on other systems if compiled this way.
|
||||
|
||||
## ccache conflicts with building TinyCC on macOS
|
||||
|
||||
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
|
||||
|
||||
```bash
|
||||
brew uninstall ccache
|
||||
brew install ccache
|
||||
```
|
||||
|
||||
72
Dockerfile
72
Dockerfile
@@ -52,11 +52,8 @@ ENV CI 1
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ENV BUILDARCH=${BUILDARCH}
|
||||
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
|
||||
ENV BUN_ENABLE_LTO 1
|
||||
|
||||
ENV CXX=clang++-${LLVM_VERSION}
|
||||
ENV CC=clang-${LLVM_VERSION}
|
||||
ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION}
|
||||
ENV LD=lld-${LLVM_VERSION}
|
||||
ENV LC_CTYPE=en_US.UTF-8
|
||||
ENV LC_ALL=en_US.UTF-8
|
||||
|
||||
@@ -93,6 +90,8 @@ RUN install_packages \
|
||||
clangd-${LLVM_VERSION} \
|
||||
libc++-${LLVM_VERSION}-dev \
|
||||
libc++abi-${LLVM_VERSION}-dev \
|
||||
llvm-${LLVM_VERSION}-runtime \
|
||||
llvm-${LLVM_VERSION}-dev \
|
||||
make \
|
||||
cmake \
|
||||
ninja-build \
|
||||
@@ -119,6 +118,15 @@ RUN install_packages \
|
||||
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
|
||||
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
|
||||
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
|
||||
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
|
||||
&& ln -sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/cc \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/c89 \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/c99 \
|
||||
&& ln -sf /usr/bin/clang++ /usr/bin/c++ \
|
||||
&& ln -sf /usr/bin/clang++ /usr/bin/g++ \
|
||||
&& ln -sf /usr/bin/llvm-ar /usr/bin/ar \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/gcc \
|
||||
&& arch="$(dpkg --print-architecture)" \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) variant="x64";; \
|
||||
@@ -131,6 +139,7 @@ RUN install_packages \
|
||||
&& ln -s /usr/bin/bun /usr/bin/bunx \
|
||||
&& rm -rf bun-linux-${variant} bun-linux-${variant}.zip \
|
||||
&& mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
# && if [ -n "${SCCACHE_BUCKET}" ]; then \
|
||||
# echo "Setting up sccache" \
|
||||
# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
|
||||
@@ -167,13 +176,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make c-ares \
|
||||
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
|
||||
&& bash ./scripts/build-cares.sh \
|
||||
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts
|
||||
|
||||
FROM bun-base as lolhtml
|
||||
|
||||
@@ -204,13 +214,14 @@ ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make mimalloc \
|
||||
&& bash ./scripts/build-mimalloc.sh \
|
||||
&& rm -rf src/deps/mimalloc Makefile
|
||||
|
||||
FROM bun-base as mimalloc-debug
|
||||
@@ -240,14 +251,38 @@ ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
|
||||
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make zlib \
|
||||
&& rm -rf src/deps/zlib Makefile
|
||||
&& bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts
|
||||
|
||||
|
||||
FROM bun-base as libdeflate
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate
|
||||
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& bash ./scripts/build-libdeflate.sh && rm -rf src/deps/libdeflate scripts
|
||||
|
||||
|
||||
FROM bun-base as libarchive
|
||||
|
||||
@@ -286,6 +321,7 @@ ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
@@ -295,7 +331,7 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make boringssl \
|
||||
&& bash ./scripts/build-boringssl.sh \
|
||||
&& rm -rf src/deps/boringssl Makefile
|
||||
|
||||
|
||||
@@ -311,12 +347,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make zstd
|
||||
&& bash ./scripts/build-zstd.sh \
|
||||
&& rm -rf src/deps/zstd scripts
|
||||
|
||||
FROM bun-base as ls-hpack
|
||||
|
||||
@@ -330,12 +368,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make lshpack
|
||||
&& bash ./scripts/build-lshpack.sh \
|
||||
&& rm -rf src/deps/ls-hpack scripts
|
||||
|
||||
FROM bun-base-with-zig as bun-identifier-cache
|
||||
|
||||
@@ -393,6 +433,9 @@ COPY src ${BUN_DIR}/src
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
|
||||
|
||||
# for uWebSockets
|
||||
COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
@@ -491,11 +534,13 @@ RUN mkdir -p build bun-webkit
|
||||
|
||||
# lol
|
||||
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli
|
||||
|
||||
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libdeflate ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
@@ -505,7 +550,8 @@ COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
|
||||
|
||||
WORKDIR $BUN_DIR/build
|
||||
|
||||
@@ -34,6 +34,8 @@ Bun statically links these libraries:
|
||||
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
|
||||
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
|
||||
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
|
||||
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
|
||||
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
|
||||
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
|
||||
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |
|
||||
|
||||
|
||||
18
Makefile
18
Makefile
@@ -26,8 +26,11 @@ ifeq ($(ARCH_NAME_RAW),arm64)
|
||||
ARCH_NAME = aarch64
|
||||
DOCKER_BUILDARCH = arm64
|
||||
BREW_PREFIX_PATH = /opt/homebrew
|
||||
DEFAULT_MIN_MACOS_VERSION = 11.0
|
||||
DEFAULT_MIN_MACOS_VERSION = 13.0
|
||||
MARCH_NATIVE = -mtune=$(CPU_TARGET)
|
||||
ifeq ($(OS_NAME),linux)
|
||||
MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1
|
||||
endif
|
||||
else
|
||||
ARCH_NAME = x64
|
||||
DOCKER_BUILDARCH = amd64
|
||||
@@ -129,7 +132,7 @@ SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null)
|
||||
|
||||
BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
|
||||
BUN_DEPS_DIR ?= $(shell pwd)/src/deps
|
||||
BUN_DEPS_OUT_DIR ?= $(BUN_DEPS_DIR)
|
||||
BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/bun-deps
|
||||
CPU_COUNT = 2
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
CPU_COUNT = $(shell sysctl -n hw.logicalcpu)
|
||||
@@ -154,7 +157,12 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
|
||||
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
|
||||
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
|
||||
-DCMAKE_AR=$(AR) \
|
||||
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
|
||||
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
|
||||
-DCMAKE_CXX_STANDARD=20 \
|
||||
-DCMAKE_C_STANDARD=17 \
|
||||
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
|
||||
-DCMAKE_C_STANDARD_REQUIRED=ON \
|
||||
-DCMAKE_CXX_EXTENSIONS=ON
|
||||
|
||||
|
||||
|
||||
@@ -181,8 +189,8 @@ endif
|
||||
|
||||
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
|
||||
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
|
||||
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
|
||||
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
|
||||
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
|
||||
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
|
||||
BUN_TMP_DIR := /tmp/make-bun
|
||||
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)
|
||||
|
||||
|
||||
BIN
bench/bun.lockb
BIN
bench/bun.lockb
Binary file not shown.
@@ -1,20 +1,43 @@
|
||||
import { run, bench } from "mitata";
|
||||
import { run, bench, group } from "mitata";
|
||||
import { gzipSync, gunzipSync } from "bun";
|
||||
|
||||
const data = new TextEncoder().encode("Hello World!".repeat(9999));
|
||||
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();
|
||||
|
||||
const compressed = gzipSync(data);
|
||||
|
||||
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
|
||||
gunzipSync(gzipSync(data));
|
||||
const libraries = ["zlib"];
|
||||
if (Bun.semver.satisfies(Bun.version.replaceAll("-debug", ""), ">=1.1.21")) {
|
||||
libraries.push("libdeflate");
|
||||
}
|
||||
const options = { library: undefined };
|
||||
const benchFn = (name, fn) => {
|
||||
if (libraries.length > 1) {
|
||||
group(name, () => {
|
||||
for (const library of libraries) {
|
||||
bench(library, () => {
|
||||
options.library = library;
|
||||
fn();
|
||||
});
|
||||
}
|
||||
});
|
||||
} else {
|
||||
options.library = libraries[0];
|
||||
bench(name, () => {
|
||||
fn();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
benchFn(`roundtrip - @babel/standalone/babel.min.js`, () => {
|
||||
gunzipSync(gzipSync(data, options), options);
|
||||
});
|
||||
|
||||
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
gzipSync(data);
|
||||
benchFn(`gzipSync(@babel/standalone/babel.min.js`, () => {
|
||||
gzipSync(data, options);
|
||||
});
|
||||
|
||||
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
gunzipSync(compressed);
|
||||
benchFn(`gunzipSync(@babel/standalone/babel.min.js`, () => {
|
||||
gunzipSync(compressed, options);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
BIN
bench/gzip/bun.lockb
Executable file
BIN
bench/gzip/bun.lockb
Executable file
Binary file not shown.
@@ -1,19 +1,22 @@
|
||||
import { run, bench } from "mitata";
|
||||
import { gzipSync, gunzipSync } from "zlib";
|
||||
import { createRequire } from "module";
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
const data = new TextEncoder().encode("Hello World!".repeat(9999));
|
||||
const require = createRequire(import.meta.url);
|
||||
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));
|
||||
|
||||
const compressed = gzipSync(data);
|
||||
|
||||
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
|
||||
bench(`roundtrip - @babel/standalone/babel.min.js)`, () => {
|
||||
gunzipSync(gzipSync(data));
|
||||
});
|
||||
|
||||
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
bench(`gzipSync(@babel/standalone/babel.min.js))`, () => {
|
||||
gzipSync(data);
|
||||
});
|
||||
|
||||
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
bench(`gunzipSync(@babel/standalone/babel.min.js))`, () => {
|
||||
gunzipSync(compressed);
|
||||
});
|
||||
|
||||
|
||||
@@ -7,5 +7,8 @@
|
||||
"bench:node": "$NODE node.mjs",
|
||||
"bench:deno": "$DENO run -A --unstable deno.js",
|
||||
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/standalone": "7.24.10"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.16.10",
|
||||
"@babel/preset-react": "^7.16.7",
|
||||
"@babel/standalone": "^7.24.7",
|
||||
"@swc/core": "^1.2.133",
|
||||
"benchmark": "^2.1.4",
|
||||
"braces": "^3.0.2",
|
||||
|
||||
@@ -6,6 +6,7 @@ const App = () => (
|
||||
<html>
|
||||
<body>
|
||||
<h1>Hello World</h1>
|
||||
<p>This is an example.</p>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
||||
14
bench/snippets/transpiler-2.mjs
Normal file
14
bench/snippets/transpiler-2.mjs
Normal file
@@ -0,0 +1,14 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { join } from "path";
|
||||
|
||||
const code = require("fs").readFileSync(
|
||||
process.argv[2] || join(import.meta.dir, "../node_modules/@babel/standalone/babel.min.js"),
|
||||
);
|
||||
|
||||
const transpiler = new Bun.Transpiler({ minify: true });
|
||||
|
||||
bench("transformSync", () => {
|
||||
transpiler.transformSync(code);
|
||||
});
|
||||
|
||||
await run();
|
||||
31
bench/sqlite/better-sqlite3.mjs
Normal file
31
bench/sqlite/better-sqlite3.mjs
Normal file
@@ -0,0 +1,31 @@
|
||||
import { run, bench } from "mitata";
|
||||
import { createRequire } from "module";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const db = require("better-sqlite3")("./src/northwind.sqlite");
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Order"`);
|
||||
|
||||
bench('SELECT * FROM "Order"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Product"`);
|
||||
|
||||
bench('SELECT * FROM "Product"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "OrderDetail"`);
|
||||
|
||||
bench('SELECT * FROM "OrderDetail"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
await run();
|
||||
@@ -1,8 +1,9 @@
|
||||
// Run `node --experimental-sqlite bench/sqlite/node.mjs` to run the script.
|
||||
// You will need `--experimental-sqlite` flag to run this script and node v22.5.0 or higher.
|
||||
import { run, bench } from "mitata";
|
||||
import { createRequire } from "module";
|
||||
import { DatabaseSync as Database } from "node:sqlite";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const db = require("better-sqlite3")("./src/northwind.sqlite");
|
||||
const db = new Database("./src/northwind.sqlite");
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Order"`);
|
||||
|
||||
181
build.zig
181
build.zig
@@ -33,8 +33,6 @@ comptime {
|
||||
}
|
||||
}
|
||||
|
||||
const default_reported_nodejs_version = "22.3.0";
|
||||
|
||||
const zero_sha = "0000000000000000000000000000000000000000";
|
||||
|
||||
const BunBuildOptions = struct {
|
||||
@@ -48,9 +46,10 @@ const BunBuildOptions = struct {
|
||||
sha: []const u8,
|
||||
enable_logs: bool = false,
|
||||
tracy_callstack_depth: u16,
|
||||
reported_nodejs_version: []const u8 = default_reported_nodejs_version,
|
||||
reported_nodejs_version: Version,
|
||||
|
||||
generated_code_dir: []const u8,
|
||||
no_llvm: bool,
|
||||
|
||||
cached_options_module: ?*Module = null,
|
||||
windows_shim: ?WindowsShim = null,
|
||||
@@ -73,14 +72,7 @@ const BunBuildOptions = struct {
|
||||
opts.addOption([:0]const u8, "sha", b.allocator.dupeZ(u8, this.sha) catch @panic("OOM"));
|
||||
opts.addOption(bool, "baseline", this.isBaseline());
|
||||
opts.addOption(bool, "enable_logs", this.enable_logs);
|
||||
opts.addOption([:0]const u8, "reported_nodejs_version", b.allocator.dupeZ(u8, this.reported_nodejs_version) catch @panic("OOM"));
|
||||
if (this.reported_nodejs_version.len > 0 and this.reported_nodejs_version[0] == 'v') {
|
||||
@panic("Node.js version should not start with 'v'");
|
||||
}
|
||||
|
||||
if (this.reported_nodejs_version.len == 0) {
|
||||
@panic("Node.js version should not be empty");
|
||||
}
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
|
||||
|
||||
const mod = opts.createModule();
|
||||
this.cached_options_module = mod;
|
||||
@@ -122,8 +114,25 @@ pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getCpuModel(os: OperatingSystem, arch: Arch) ?Target.Query.CpuModel {
|
||||
// https://github.com/oven-sh/bun/issues/12076
|
||||
if (os == .linux and arch == .aarch64) {
|
||||
return .{ .explicit = &Target.aarch64.cpu.cortex_a35 };
|
||||
}
|
||||
|
||||
// Be explicit and ensure we do not accidentally target a newer M-series chip
|
||||
if (os == .mac and arch == .aarch64) {
|
||||
return .{ .explicit = &Target.aarch64.cpu.apple_m1 };
|
||||
}
|
||||
|
||||
// note: x86_64 is dealt with in the CMake config and passed in.
|
||||
// the reason for the explicit handling on aarch64 is due to troubles
|
||||
// passing the exact target in via flags.
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn build(b: *Build) !void {
|
||||
std.debug.print("zig build v{s}\n", .{builtin.zig_version_string});
|
||||
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
|
||||
|
||||
b.zig_lib_dir = b.zig_lib_dir orelse b.path("src/deps/zig/lib");
|
||||
|
||||
@@ -147,6 +156,14 @@ pub fn build(b: *Build) !void {
|
||||
break :brk .{ os, arch };
|
||||
};
|
||||
|
||||
// target must be refined to support older but very popular devices on
|
||||
// aarch64, this means moving the minimum supported CPU to support certain
|
||||
// raspberry PIs. there are also a number of cloud hosts that use virtual
|
||||
// machines with surprisingly out of date versions of glibc.
|
||||
if (getCpuModel(os, arch)) |cpu_model| {
|
||||
target_query.cpu_model = cpu_model;
|
||||
}
|
||||
|
||||
target_query.os_version_min = getOSVersionMin(os);
|
||||
target_query.glibc_version = getOSGlibCVersion(os);
|
||||
|
||||
@@ -163,6 +180,10 @@ pub fn build(b: *Build) !void {
|
||||
break :ref_trace if (trace == 0) null else trace;
|
||||
};
|
||||
|
||||
const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;
|
||||
|
||||
const no_llvm = b.option(bool, "no_llvm", "Experiment with Zig self hosted backends. No stability guaranteed") orelse false;
|
||||
|
||||
var build_options = BunBuildOptions{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
@@ -171,6 +192,7 @@ pub fn build(b: *Build) !void {
|
||||
.arch = arch,
|
||||
|
||||
.generated_code_dir = generated_code_dir,
|
||||
.no_llvm = no_llvm,
|
||||
|
||||
.version = try Version.parse(bun_version),
|
||||
.canary_revision = canary: {
|
||||
@@ -178,7 +200,10 @@ pub fn build(b: *Build) !void {
|
||||
break :canary if (rev == 0) null else rev;
|
||||
},
|
||||
|
||||
.reported_nodejs_version = b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse default_reported_nodejs_version,
|
||||
.reported_nodejs_version = try Version.parse(
|
||||
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
|
||||
"0.0.0-unset",
|
||||
),
|
||||
|
||||
.sha = sha: {
|
||||
const sha = b.option([]const u8, "sha", "Force the git sha") orelse
|
||||
@@ -224,7 +249,7 @@ pub fn build(b: *Build) !void {
|
||||
var step = b.step("obj", "Build Bun's Zig code as a .o file");
|
||||
var bun_obj = addBunObject(b, &build_options);
|
||||
step.dependOn(&bun_obj.step);
|
||||
step.dependOn(&b.addInstallFile(bun_obj.getEmittedBin(), "bun-zig.o").step);
|
||||
step.dependOn(addInstallObjectFile(b, bun_obj, "bun-zig", obj_format));
|
||||
}
|
||||
|
||||
// zig build windows-shim
|
||||
@@ -252,61 +277,60 @@ pub fn build(b: *Build) !void {
|
||||
|
||||
// zig build check-all
|
||||
{
|
||||
var step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
|
||||
inline for (.{
|
||||
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .x86_64 },
|
||||
.{ .os = .mac, .arch = .aarch64 },
|
||||
.{ .os = .linux, .arch = .x86_64 },
|
||||
.{ .os = .linux, .arch = .aarch64 },
|
||||
}) |check| {
|
||||
inline for (.{ .Debug, .ReleaseFast }) |mode| {
|
||||
const check_target = b.resolveTargetQuery(.{
|
||||
.os_tag = OperatingSystem.stdOSTag(check.os),
|
||||
.cpu_arch = check.arch,
|
||||
.os_version_min = getOSVersionMin(check.os),
|
||||
.glibc_version = getOSGlibCVersion(check.os),
|
||||
});
|
||||
|
||||
var options = BunBuildOptions{
|
||||
.target = check_target,
|
||||
.os = check.os,
|
||||
.arch = check_target.result.cpu.arch,
|
||||
.optimize = mode,
|
||||
|
||||
.canary_revision = build_options.canary_revision,
|
||||
.sha = build_options.sha,
|
||||
.tracy_callstack_depth = build_options.tracy_callstack_depth,
|
||||
.version = build_options.version,
|
||||
.reported_nodejs_version = build_options.reported_nodejs_version,
|
||||
.generated_code_dir = build_options.generated_code_dir,
|
||||
};
|
||||
var obj = addBunObject(b, &options);
|
||||
obj.generated_bin = null;
|
||||
step.dependOn(&obj.step);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Running `zig build` with no arguments is almost always a mistake.
|
||||
// TODO: revive this error. cannot right now since ZLS runs zig build without arguments
|
||||
// zig build check-windows
|
||||
{
|
||||
// const mistake_message = b.addSystemCommand(&.{
|
||||
// "echo",
|
||||
// \\
|
||||
// \\To build Bun from source, please use `bun run setup` instead of `zig build`"
|
||||
// \\For more info, see https://bun.sh/docs/project/contributing
|
||||
// \\
|
||||
// \\If you want to build the zig code in isolation, run:
|
||||
// \\ 'zig build obj -Dgenerated-code=./build/codegen [...opts]'
|
||||
// \\
|
||||
// \\If you want to test a compile without emitting an object:
|
||||
// \\ 'zig build check'
|
||||
// \\ 'zig build check-all' (run linux+mac+windows)
|
||||
// \\
|
||||
// });
|
||||
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
|
||||
addMultiCheck(b, step, build_options, &.{
|
||||
.{ .os = .windows, .arch = .x86_64 },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// b.default_step.dependOn(&mistake_message.step);
|
||||
pub inline fn addMultiCheck(
|
||||
b: *Build,
|
||||
parent_step: *Step,
|
||||
root_build_options: BunBuildOptions,
|
||||
to_check: []const struct { os: OperatingSystem, arch: Arch },
|
||||
) void {
|
||||
inline for (to_check) |check| {
|
||||
inline for (.{ .Debug, .ReleaseFast }) |mode| {
|
||||
const check_target = b.resolveTargetQuery(.{
|
||||
.os_tag = OperatingSystem.stdOSTag(check.os),
|
||||
.cpu_arch = check.arch,
|
||||
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch,
|
||||
.os_version_min = getOSVersionMin(check.os),
|
||||
.glibc_version = getOSGlibCVersion(check.os),
|
||||
});
|
||||
|
||||
var options: BunBuildOptions = .{
|
||||
.target = check_target,
|
||||
.os = check.os,
|
||||
.arch = check_target.result.cpu.arch,
|
||||
.optimize = mode,
|
||||
|
||||
.canary_revision = root_build_options.canary_revision,
|
||||
.sha = root_build_options.sha,
|
||||
.tracy_callstack_depth = root_build_options.tracy_callstack_depth,
|
||||
.version = root_build_options.version,
|
||||
.reported_nodejs_version = root_build_options.reported_nodejs_version,
|
||||
.generated_code_dir = root_build_options.generated_code_dir,
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
};
|
||||
|
||||
var obj = addBunObject(b, &options);
|
||||
obj.generated_bin = null;
|
||||
parent_step.dependOn(&obj.step);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -319,10 +343,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
},
|
||||
.target = opts.target,
|
||||
.optimize = opts.optimize,
|
||||
.use_llvm = !opts.no_llvm,
|
||||
.use_lld = if (opts.os == .mac) false else !opts.no_llvm,
|
||||
|
||||
// https://github.com/ziglang/zig/issues/17430
|
||||
.pic = true,
|
||||
|
||||
.omit_frame_pointer = false,
|
||||
.strip = false, // stripped at the end
|
||||
});
|
||||
|
||||
obj.bundle_compiler_rt = false;
|
||||
obj.formatted_panics = true;
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
@@ -340,9 +369,10 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
}
|
||||
|
||||
if (opts.os == .linux) {
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_emit_relocs = false;
|
||||
obj.link_eh_frame_hdr = false;
|
||||
obj.link_function_sections = true;
|
||||
obj.link_data_sections = true;
|
||||
|
||||
if (opts.optimize == .Debug) {
|
||||
obj.root_module.valgrind = true;
|
||||
@@ -353,6 +383,25 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const ObjectFormat = enum {
|
||||
bc,
|
||||
obj,
|
||||
};
|
||||
|
||||
pub fn addInstallObjectFile(
|
||||
b: *Build,
|
||||
compile: *Compile,
|
||||
name: []const u8,
|
||||
out_mode: ObjectFormat,
|
||||
) *Step {
|
||||
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
|
||||
const bin = compile.getEmittedBin();
|
||||
return &b.addInstallFile(switch (out_mode) {
|
||||
.obj => bin,
|
||||
.bc => compile.getEmittedLlvmBc(),
|
||||
}, b.fmt("{s}.o", .{name})).step;
|
||||
}
|
||||
|
||||
fn exists(path: []const u8) bool {
|
||||
const file = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
|
||||
file.close();
|
||||
@@ -413,7 +462,11 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
|
||||
|
||||
fn validateGeneratedPath(path: []const u8) void {
|
||||
if (!exists(path)) {
|
||||
std.debug.panic("{s} does not exist in generated code directory!", .{std.fs.path.basename(path)});
|
||||
std.debug.panic(
|
||||
\\Generated file '{s}' is missing!
|
||||
\\
|
||||
\\Make sure to use CMake and Ninja, or pass a manual codegen folder with '-Dgenerated-code=...'
|
||||
, .{path});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -82,7 +82,7 @@ _bun_completions() {
|
||||
declare -A PACKAGE_OPTIONS;
|
||||
declare -A PM_OPTIONS;
|
||||
|
||||
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
|
||||
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x test repl update link unlink build";
|
||||
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";
|
||||
|
||||
@@ -425,6 +425,7 @@ _bun_run_completion() {
|
||||
'--external[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
|
||||
'-e[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
|
||||
'--loader[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
|
||||
'--packages[Exclude dependencies from bundle, e.g. --packages external. Valid options: bundle, external]:packages' \
|
||||
'-l[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
|
||||
'--origin[Rewrite import URLs to start with --origin. Default: ""]:origin' \
|
||||
'-u[Rewrite import URLs to start with --origin. Default: ""]:origin' \
|
||||
|
||||
@@ -61,7 +61,7 @@ To do anything interesting we need a construct known as a "view". A view is a cl
|
||||
|
||||
The `DataView` class is a lower-level interface for reading and manipulating the data in an `ArrayBuffer`.
|
||||
|
||||
Below we create a new `DataView` and set the first byte to 5.
|
||||
Below we create a new `DataView` and set the first byte to 3.
|
||||
|
||||
```ts
|
||||
const buf = new ArrayBuffer(4);
|
||||
@@ -395,7 +395,7 @@ Bun implements `Buffer`, a Node.js API for working with binary data that pre-dat
|
||||
|
||||
```ts
|
||||
const buf = Buffer.from("hello world");
|
||||
// => Buffer(16) [ 116, 104, 105, 115, 32, 105, 115, 32, 97, 32, 115, 116, 114, 105, 110, 103 ]
|
||||
// => Buffer(11) [ 104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100 ]
|
||||
|
||||
buf.length; // => 11
|
||||
buf[0]; // => 104, ascii for 'h'
|
||||
|
||||
308
docs/api/fetch.md
Normal file
308
docs/api/fetch.md
Normal file
@@ -0,0 +1,308 @@
|
||||
Bun implements the WHATWG `fetch` standard, with some extensions to meet the needs of server-side JavaScript.
|
||||
|
||||
Bun also implements `node:http`, but `fetch` is generally recommended instead.
|
||||
|
||||
## Sending an HTTP request
|
||||
|
||||
To send an HTTP request, use `fetch`
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com");
|
||||
|
||||
console.log(response.status); // => 200
|
||||
|
||||
const text = await response.text(); // or response.json(), response.formData(), etc.
|
||||
```
|
||||
|
||||
`fetch` also works with HTTPS URLs.
|
||||
|
||||
```ts
|
||||
const response = await fetch("https://example.com");
|
||||
```
|
||||
|
||||
You can also pass `fetch` a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object.
|
||||
|
||||
```ts
|
||||
const request = new Request("http://example.com", {
|
||||
method: "POST",
|
||||
body: "Hello, world!",
|
||||
});
|
||||
|
||||
const response = await fetch(request);
|
||||
```
|
||||
|
||||
### Sending a POST request
|
||||
|
||||
To send a POST request, pass an object with the `method` property set to `"POST"`.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
method: "POST",
|
||||
body: "Hello, world!",
|
||||
});
|
||||
```
|
||||
|
||||
`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Body/body) for more information.
|
||||
|
||||
### Proxying requests
|
||||
|
||||
To proxy a request, pass an object with the `proxy` property set to a URL.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
proxy: "http://proxy.com",
|
||||
});
|
||||
```
|
||||
|
||||
### Custom headers
|
||||
|
||||
To set custom headers, pass an object with the `headers` property set to an object.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
headers: {
|
||||
"X-Custom-Header": "value",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
You can also set headers using the [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object.
|
||||
|
||||
```ts
|
||||
const headers = new Headers();
|
||||
headers.append("X-Custom-Header", "value");
|
||||
|
||||
const response = await fetch("http://example.com", {
|
||||
headers,
|
||||
});
|
||||
```
|
||||
|
||||
### Response bodies
|
||||
|
||||
To read the response body, use one of the following methods:
|
||||
|
||||
- `response.text(): Promise<string>`: Returns a promise that resolves with the response body as a string.
|
||||
- `response.json(): Promise<any>`: Returns a promise that resolves with the response body as a JSON object.
|
||||
- `response.formData(): Promise<FormData>`: Returns a promise that resolves with the response body as a `FormData` object.
|
||||
- `response.bytes(): Promise<Uint8Array>`: Returns a promise that resolves with the response body as a `Uint8Array`.
|
||||
- `response.arrayBuffer(): Promise<ArrayBuffer>`: Returns a promise that resolves with the response body as an `ArrayBuffer`.
|
||||
- `response.blob(): Promise<Blob>`: Returns a promise that resolves with the response body as a `Blob`.
|
||||
|
||||
#### Streaming response bodies
|
||||
|
||||
You can use async iterators to stream the response body.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com");
|
||||
|
||||
for await (const chunk of response.body) {
|
||||
console.log(chunk);
|
||||
}
|
||||
```
|
||||
|
||||
You can also more directly access the `ReadableStream` object.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com");
|
||||
|
||||
const stream = response.body;
|
||||
|
||||
const reader = stream.getReader();
|
||||
const { value, done } = await reader.read();
|
||||
```
|
||||
|
||||
### Fetching a URL with a timeout
|
||||
|
||||
To fetch a URL with a timeout, use `AbortSignal.timeout`:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
signal: AbortSignal.timeout(1000),
|
||||
});
|
||||
```
|
||||
|
||||
#### Canceling a request
|
||||
|
||||
To cancel a request, use an `AbortController`:
|
||||
|
||||
```ts
|
||||
const controller = new AbortController();
|
||||
|
||||
const response = await fetch("http://example.com", {
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
controller.abort();
|
||||
```
|
||||
|
||||
### Unix domain sockets
|
||||
|
||||
To fetch a URL using a Unix domain socket, use the `unix: string` option:
|
||||
|
||||
```ts
|
||||
const response = await fetch("https://hostname/a/path", {
|
||||
unix: "/var/run/path/to/unix.sock",
|
||||
method: "POST",
|
||||
body: JSON.stringify({ message: "Hello from Bun!" }),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### TLS
|
||||
|
||||
To use a client certificate, use the `tls` option:
|
||||
|
||||
```ts
|
||||
await fetch("https://example.com", {
|
||||
tls: {
|
||||
key: Bun.file("/path/to/key.pem"),
|
||||
cert: Bun.file("/path/to/cert.pem"),
|
||||
// ca: [Bun.file("/path/to/ca.pem")],
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
#### Custom TLS Validation
|
||||
|
||||
To customize the TLS validation, use the `checkServerIdentity` option in `tls`
|
||||
|
||||
```ts
|
||||
await fetch("https://example.com", {
|
||||
tls: {
|
||||
checkServerIdentity: (hostname, peerCertificate) => {
|
||||
// Return an error if the certificate is invalid
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
This is similar to how it works in Node's `tls` module.
|
||||
|
||||
## Debugging
|
||||
|
||||
To help with debugging, you can pass `verbose: true` to `fetch`:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
verbose: true,
|
||||
});
|
||||
```
|
||||
|
||||
This will print the request and response headers to your terminal:
|
||||
|
||||
```sh
|
||||
[fetch] > HTTP/1.1 GET http://example.com/
|
||||
[fetch] > Connection: keep-alive
|
||||
[fetch] > User-Agent: Bun/1.1.21
|
||||
[fetch] > Accept: */*
|
||||
[fetch] > Host: example.com
|
||||
[fetch] > Accept-Encoding: gzip, deflate, br
|
||||
|
||||
[fetch] < 200 OK
|
||||
[fetch] < Content-Encoding: gzip
|
||||
[fetch] < Age: 201555
|
||||
[fetch] < Cache-Control: max-age=604800
|
||||
[fetch] < Content-Type: text/html; charset=UTF-8
|
||||
[fetch] < Date: Sun, 21 Jul 2024 02:41:14 GMT
|
||||
[fetch] < Etag: "3147526947+gzip"
|
||||
[fetch] < Expires: Sun, 28 Jul 2024 02:41:14 GMT
|
||||
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
|
||||
[fetch] < Server: ECAcc (sac/254F)
|
||||
[fetch] < Vary: Accept-Encoding
|
||||
[fetch] < X-Cache: HIT
|
||||
[fetch] < Content-Length: 648
|
||||
```
|
||||
|
||||
Note: `verbose: boolean` is not part of the Web standard `fetch` API and is specific to Bun.
|
||||
|
||||
## Performance
|
||||
|
||||
Before an HTTP request can be sent, the DNS lookup must be performed. This can take a significant amount of time, especially if the DNS server is slow or the network connection is poor.
|
||||
|
||||
After the DNS lookup, the TCP socket must be connected and the TLS handshake might need to be performed. This can also take a significant amount of time.
|
||||
|
||||
After the request completes, consuming the response body can also take a significant amount of time and memory.
|
||||
|
||||
At every step of the way, Bun provides APIs to help you optimize the performance of your application.
|
||||
|
||||
### DNS prefetching
|
||||
|
||||
To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful when you know you'll need to connect to a host soon and want to avoid the initial DNS lookup.
|
||||
|
||||
```ts
|
||||
import { dns } from "bun";
|
||||
|
||||
dns.prefetch("bun.sh", 443);
|
||||
```
|
||||
|
||||
#### DNS caching
|
||||
|
||||
By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`:
|
||||
|
||||
To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation.
|
||||
|
||||
### Preconnect to a host
|
||||
|
||||
To preconnect to a host, you can use the `fetch.preconnect` API. This API is useful when you know you'll need to connect to a host soon and want to start the initial DNS lookup, TCP socket connection, and TLS handshake early.
|
||||
|
||||
```ts
|
||||
import { fetch } from "bun";
|
||||
|
||||
fetch.preconnect("https://bun.sh");
|
||||
```
|
||||
|
||||
Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet.
|
||||
|
||||
#### Preconnect at startup
|
||||
|
||||
To preconnect to a host at startup, you can pass `--fetch-preconnect`:
|
||||
|
||||
```sh
|
||||
$ bun --fetch-preconnect https://bun.sh ./my-script.ts
|
||||
```
|
||||
|
||||
This is sort of like `<link rel="preconnect">` in HTML.
|
||||
|
||||
This feature is not implemented on Windows yet. If you're interested in using this feature on Windows, please file an issue and we can implement support for it on Windows.
|
||||
|
||||
### Connection pooling & HTTP keep-alive
|
||||
|
||||
Bun automatically reuses connections to the same host. This is known as connection pooling. This can significantly reduce the time it takes to establish a connection. You don't need to do anything to enable this; it's automatic.
|
||||
|
||||
#### Simultaneous connection limit
|
||||
|
||||
By default, Bun limits the maximum number of simultaneous `fetch` requests to 256. We do this for several reasons:
|
||||
|
||||
- It improves overall system stability. Operating systems have an upper limit on the number of simultaneous open TCP sockets, usually in the low thousands. Nearing this limit causes your entire computer to behave strangely. Applications hang and crash.
|
||||
- It encourages HTTP Keep-Alive connection reuse. For short-lived HTTP requests, the slowest step is often the initial connection setup. Reusing connections can save a lot of time.
|
||||
|
||||
When the limit is exceeded, the requests are queued and sent as soon as the next request ends.
|
||||
|
||||
You can increase the maximum number of simultaneous connections via the `BUN_CONFIG_MAX_HTTP_REQUESTS` environment variable:
|
||||
|
||||
```sh
|
||||
$ BUN_CONFIG_MAX_HTTP_REQUESTS=512 bun ./my-script.ts
|
||||
```
|
||||
|
||||
The max value for this limit is currently set to 65,336. The maximum port number is 65,535, so it's quite difficult for any one computer to exceed this limit.
|
||||
|
||||
### Response buffering
|
||||
|
||||
Bun goes to great lengths to optimize the performance of reading the response body. The fastest way to read the response body is to use one of these methods:
|
||||
|
||||
- `response.text(): Promise<string>`
|
||||
- `response.json(): Promise<any>`
|
||||
- `response.formData(): Promise<FormData>`
|
||||
- `response.bytes(): Promise<Uint8Array>`
|
||||
- `response.arrayBuffer(): Promise<ArrayBuffer>`
|
||||
- `response.blob(): Promise<Blob>`
|
||||
|
||||
You can also use `Bun.write` to write the response body to a file on disk:
|
||||
|
||||
```ts
|
||||
import { write } from "bun";
|
||||
|
||||
await write("output.txt", response);
|
||||
```
|
||||
@@ -756,6 +756,25 @@ $ bun build ./index.tsx --outdir ./out --external '*'
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `packages`
|
||||
|
||||
Control whether package dependencies are included in the bundle or not. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..` or `/` as a package.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.ts'],
|
||||
packages: 'external',
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.ts --packages external
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `naming`
|
||||
|
||||
Customizes the generated file names. Defaults to `./[dir]/[name].[ext]`.
|
||||
|
||||
@@ -94,8 +94,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
---
|
||||
|
||||
- `--packages`
|
||||
- n/a
|
||||
- Not supported
|
||||
- `--packages`
|
||||
- No differences
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -35,6 +35,10 @@ $ bun add --optional lodash
|
||||
|
||||
## `--exact`
|
||||
|
||||
{% callout %}
|
||||
**Alias** — `-E`
|
||||
{% /callout %}
|
||||
|
||||
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
|
||||
|
||||
```bash
|
||||
|
||||
@@ -15,7 +15,7 @@ To _containerize_ our application, we define a `Dockerfile`. This file contains
|
||||
```docker#Dockerfile
|
||||
# use the official Bun image
|
||||
# see all versions at https://hub.docker.com/r/oven/bun/tags
|
||||
FROM oven/bun:1 as base
|
||||
FROM oven/bun:1 AS base
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# install dependencies into temp directory
|
||||
|
||||
@@ -69,7 +69,7 @@ export const movies = sqliteTable("movies", {
|
||||
We can use the `drizzle-kit` CLI to generate an initial SQL migration.
|
||||
|
||||
```sh
|
||||
$ bunx drizzle-kit generate:sqlite --schema ./schema.ts
|
||||
$ bunx drizzle-kit generate --dialect sqlite --schema ./schema.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -13,7 +13,7 @@ console.log(Bun.argv);
|
||||
Running this file with arguments results in the following:
|
||||
|
||||
```sh
|
||||
$ bun run cli.tsx --flag1 --flag2 value
|
||||
$ bun run cli.ts --flag1 --flag2 value
|
||||
[ '/path/to/bun', '/path/to/cli.ts', '--flag1', '--flag2', 'value' ]
|
||||
```
|
||||
|
||||
@@ -47,7 +47,7 @@ console.log(positionals);
|
||||
then it outputs
|
||||
|
||||
```
|
||||
$ bun run cli.tsx --flag1 --flag2 value
|
||||
$ bun run cli.ts --flag1 --flag2 value
|
||||
{
|
||||
flag1: true,
|
||||
flag2: "value",
|
||||
|
||||
@@ -16,7 +16,7 @@ await proc.exited;
|
||||
The second argument accepts a configuration object.
|
||||
|
||||
```ts
|
||||
const proc = Bun.spawn("echo", ["Hello, world!"], {
|
||||
const proc = Bun.spawn(["echo", "Hello, world!"], {
|
||||
cwd: "/tmp",
|
||||
env: { FOO: "bar" },
|
||||
onExit(proc, exitCode, signalCode, error) {
|
||||
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
steps:
|
||||
# ...
|
||||
- uses: actions/checkout@v4
|
||||
+ - uses: oven-sh/setup-bun@v1
|
||||
+ - uses: oven-sh/setup-bun@v2
|
||||
|
||||
# run any `bun` or `bunx` command
|
||||
+ - run: bun install
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# ...
|
||||
- uses: oven-sh/setup-bun@v1
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
+ with:
|
||||
+ bun-version: 1.0.11 # or "latest", "canary", <sha>
|
||||
```
|
||||
|
||||
@@ -5,8 +5,8 @@ name: Define and replace static globals & constants
|
||||
The `--define` flag lets you declare statically-analyzable constants and globals. It replaces all usages of an identifier or property in a JavaScript or TypeScript file with a constant value. This feature is supported at runtime and also in `bun build`. This is sort of similar to `#define` in C/C++, except for JavaScript.
|
||||
|
||||
```ts
|
||||
bun --define:process.env.NODE_ENV="'production'" src/index.ts # Runtime
|
||||
bun build --define:process.env.NODE_ENV="'production'" src/index.ts # Build
|
||||
bun --define process.env.NODE_ENV="'production'" src/index.ts # Runtime
|
||||
bun build --define process.env.NODE_ENV="'production'" src/index.ts # Build
|
||||
```
|
||||
|
||||
---
|
||||
@@ -95,7 +95,7 @@ To replace all usages of `AWS` with the JSON object `{"ACCESS_KEY":"abc","SECRET
|
||||
|
||||
```sh
|
||||
# JSON
|
||||
bun --define:AWS='{"ACCESS_KEY":"abc","SECRET_KEY":"def"}' src/index.ts
|
||||
bun --define AWS='{"ACCESS_KEY":"abc","SECRET_KEY":"def"}' src/index.ts
|
||||
```
|
||||
|
||||
Those will be transformed into the equivalent JavaScript code.
|
||||
@@ -119,7 +119,7 @@ You can also pass properties to the `--define` flag.
|
||||
For example, to replace all usages of `console.write` with `console.log`, you can use the following command (requires Bun v1.1.5 or later)
|
||||
|
||||
```sh
|
||||
bun --define:console.write=console.log src/index.ts
|
||||
bun --define console.write=console.log src/index.ts
|
||||
```
|
||||
|
||||
That transforms the following input:
|
||||
|
||||
@@ -52,7 +52,7 @@ Different thresholds can be set for line-level and function-level coverage.
|
||||
```toml
|
||||
[test]
|
||||
# to set different thresholds for lines and functions
|
||||
coverageThreshold = { line = 0.5, function = 0.7 }
|
||||
coverageThreshold = { lines = 0.5, functions = 0.7 }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
|
||||
|
||||
{% details summary="Configuring cache behavior" (bunfig.toml) %}
|
||||
{% details summary="Configuring cache behavior (bunfig.toml)" %}
|
||||
|
||||
```toml
|
||||
[install.cache]
|
||||
@@ -15,8 +15,6 @@ disable = false
|
||||
disableManifest = false
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Minimizing re-downloads
|
||||
|
||||
Bun strives to avoid re-downloading packages multiple times. When installing a package, if the cache already contains a version in the range specified by `package.json`, Bun will use the cached package instead of downloading it again.
|
||||
|
||||
75
docs/install/npmrc.md
Normal file
75
docs/install/npmrc.md
Normal file
@@ -0,0 +1,75 @@
|
||||
Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.com/cli/v10/configuring-npm/npmrc) files, allowing you to reuse existing registry/scope configurations.
|
||||
|
||||
{% callout %}
|
||||
|
||||
**NOTE**: We recommend migrating your `.npmrc` file to Bun's [`bunfig.toml`](/docs/runtime/bunfig) format, as it provides more flexible options and can let you configure Bun-specific options.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
# Supported options
|
||||
|
||||
### `registry`: Set the default registry
|
||||
|
||||
The default registry is used to resolve packages; its default value is `npm`'s official registry (`https://registry.npmjs.org/`).
|
||||
|
||||
To change it, you can set the `registry` option in `.npmrc`:
|
||||
|
||||
```ini
|
||||
registry=http://localhost:4873/
|
||||
```
|
||||
|
||||
The equivalent `bunfig.toml` option is [`install.registry`](/docs/runtime/bunfig#install-registry):
|
||||
|
||||
```toml
|
||||
install.registry = "http://localhost:4873/"
|
||||
```
|
||||
|
||||
### `@<scope>:registry`: Set the registry for a specific scope
|
||||
|
||||
Allows you to set the registry for a specific scope:
|
||||
|
||||
```ini
|
||||
@myorg:registry=http://localhost:4873/
|
||||
```
|
||||
|
||||
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):
|
||||
|
||||
```toml
|
||||
[install.scopes]
|
||||
myorg = "http://localhost:4873/"
|
||||
```
|
||||
|
||||
### `//<registry_url>/:<key>=<value>`: Configure options for a specific registry
|
||||
|
||||
Allows you to set options for a specific registry:
|
||||
|
||||
```ini
|
||||
# set an auth token for the registry
|
||||
# ${...} is a placeholder for environment variables
|
||||
//http://localhost:4873/:_authToken=${NPM_TOKEN}
|
||||
|
||||
|
||||
# or you could set a username and password
|
||||
# note that the password is base64 encoded
|
||||
//http://localhost:4873/:username=myusername
|
||||
|
||||
//http://localhost:4873/:_password=${NPM_PASSWORD}
|
||||
|
||||
# or use _auth, which is your username and password
|
||||
# combined into a single string, which is then base 64 encoded
|
||||
//http://localhost:4873/:_auth=${NPM_AUTH}
|
||||
```
|
||||
|
||||
The following options are supported:
|
||||
|
||||
- `_authToken`
|
||||
- `username`
|
||||
- `_password` (base64 encoded password)
|
||||
- `_auth` (base64 encoded username:password, e.g. `btoa(username + ":" + password)`)
|
||||
|
||||
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):
|
||||
|
||||
```toml
|
||||
[install.scopes]
|
||||
myorg = { url = "http://localhost:4873/", username = "myusername", password = "$NPM_PASSWORD" }
|
||||
```
|
||||
@@ -30,10 +30,6 @@ $ docker pull oven/bun
|
||||
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
|
||||
```
|
||||
|
||||
```bash#Proto
|
||||
$ proto install bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Windows
|
||||
@@ -146,7 +142,6 @@ $ bun upgrade
|
||||
|
||||
**Scoop users** — To avoid conflicts with Scoop, use `scoop update bun` instead.
|
||||
|
||||
**proto users** - Use `proto install bun --pin` instead.
|
||||
{% /callout %}
|
||||
|
||||
## Canary builds
|
||||
@@ -291,8 +286,4 @@ $ npm uninstall -g bun
|
||||
$ brew uninstall bun
|
||||
```
|
||||
|
||||
```bash#Proto
|
||||
$ proto uninstall bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
@@ -197,6 +197,9 @@ export default {
|
||||
description:
|
||||
"Patch dependencies in your project to fix bugs or add features without vendoring the entire package.",
|
||||
}),
|
||||
page("install/npmrc", ".npmrc support", {
|
||||
description: "Bun supports loading some configuration options from .npmrc",
|
||||
}),
|
||||
// page("install/utilities", "Utilities", {
|
||||
// description: "Use `bun pm` to introspect your global module cache or project dependency tree.",
|
||||
// }),
|
||||
@@ -284,8 +287,11 @@ export default {
|
||||
|
||||
divider("API"),
|
||||
page("api/http", "HTTP server", {
|
||||
description: `Bun implements Web-standard fetch, plus a Bun-native API for building fast HTTP servers.`,
|
||||
description: `Bun implements a fast HTTP server built on Request/Response objects, along with supporting node:http APIs.`,
|
||||
}), // "`Bun.serve`"),
|
||||
page("api/fetch", "HTTP client", {
|
||||
description: `Bun implements Web-standard fetch with some Bun-native extensions.`,
|
||||
}), // "fetch"),
|
||||
page("api/websockets", "WebSockets", {
|
||||
description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`,
|
||||
}), // "`Bun.serve`"),
|
||||
|
||||
@@ -171,6 +171,8 @@ Once imported, you should see something like this:
|
||||
|
||||
{% image alt="Viewing heap snapshot in Safari" src="https://user-images.githubusercontent.com/709451/204429337-b0d8935f-3509-4071-b991-217794d1fb27.png" caption="Viewing heap snapshot in Safari Dev Tools" /%}
|
||||
|
||||
> The [web debugger](https://bun.sh/docs/runtime/debugger#inspect) also offers the timeline feature which allows you to track and examine the memory usage of the running debug session.
|
||||
|
||||
### Native heap stats
|
||||
|
||||
Bun uses mimalloc for the other heap. To report a summary of non-JavaScript memory usage, set the `MIMALLOC_SHOW_STATS=1` environment variable, and stats will print on exit.
|
||||
|
||||
@@ -60,7 +60,7 @@ Visual Studio can be installed graphically using the wizard or through WinGet:
|
||||
|
||||
After Visual Studio, you need the following:
|
||||
|
||||
- LLVM 16
|
||||
- LLVM 18.1.8
|
||||
- Go
|
||||
- Rust
|
||||
- NASM
|
||||
@@ -78,14 +78,14 @@ After Visual Studio, you need the following:
|
||||
|
||||
```ps1#WinGet
|
||||
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
|
||||
> winget install -i LLVM.LLVM -v 16.0.6 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
|
||||
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
|
||||
```
|
||||
|
||||
```ps1#Scoop
|
||||
> irm https://get.scoop.sh | iex
|
||||
> scoop install nodejs-lts go rust nasm ruby perl
|
||||
# scoop seems to be buggy if you install llvm and the rest at the same time
|
||||
> scoop install llvm@16.0.6
|
||||
> scoop install llvm@18.1.8
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
@@ -179,7 +179,7 @@ These environment variables are read by Bun and configure aspects of its behavio
|
||||
---
|
||||
|
||||
- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
|
||||
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
|
||||
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=true`, then `bun --watch` will not clear the console on reload
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -48,14 +48,6 @@ In this case, we are importing from `./hello`, a relative path with no extension
|
||||
- `./hello/index.cjs`
|
||||
- `./hello/index.json`
|
||||
|
||||
Import paths are case-insensitive, meaning these are all valid imports:
|
||||
|
||||
```ts#index.ts
|
||||
import { hello } from "./hello";
|
||||
import { hello } from "./HELLO";
|
||||
import { hello } from "./hElLo";
|
||||
```
|
||||
|
||||
Import paths can optionally include extensions. If an extension is present, Bun will only check for a file with that exact extension.
|
||||
|
||||
```ts#index.ts
|
||||
|
||||
@@ -153,7 +153,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:util`](https://nodejs.org/api/util.html)
|
||||
|
||||
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
|
||||
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
|
||||
|
||||
### [`node:v8`](https://nodejs.org/api/v8.html)
|
||||
|
||||
@@ -193,7 +193,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
### [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer)
|
||||
|
||||
🟡 Incomplete implementation of `base64` and `base64url` encodings.
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)
|
||||
|
||||
|
||||
@@ -7,22 +7,36 @@ The following Web APIs are partially or completely supported.
|
||||
---
|
||||
|
||||
- HTTP
|
||||
- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch) [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
|
||||
- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch)
|
||||
[`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)
|
||||
[`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request)
|
||||
[`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
|
||||
[`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
|
||||
[`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
|
||||
|
||||
---
|
||||
|
||||
- URLs
|
||||
- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
|
||||
- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
|
||||
[`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
|
||||
|
||||
---
|
||||
|
||||
- Web Workers
|
||||
- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage) [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) [`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort) [`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel).
|
||||
- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker)
|
||||
[`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage)
|
||||
[`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone)
|
||||
[`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort)
|
||||
[`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel).
|
||||
|
||||
---
|
||||
|
||||
- Streams
|
||||
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
|
||||
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
|
||||
[`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream)
|
||||
[`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream)
|
||||
[`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)
|
||||
[`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
|
||||
|
||||
---
|
||||
|
||||
@@ -37,7 +51,10 @@ The following Web APIs are partially or completely supported.
|
||||
---
|
||||
|
||||
- Encoding and decoding
|
||||
- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob) [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa) [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
|
||||
- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob)
|
||||
[`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa)
|
||||
[`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)
|
||||
[`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
|
||||
|
||||
---
|
||||
|
||||
@@ -47,7 +64,8 @@ The following Web APIs are partially or completely supported.
|
||||
---
|
||||
|
||||
- Timeouts
|
||||
- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
|
||||
- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout)
|
||||
[`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
|
||||
|
||||
---
|
||||
|
||||
@@ -57,14 +75,16 @@ The following Web APIs are partially or completely supported.
|
||||
---
|
||||
|
||||
- Crypto
|
||||
- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto) [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
|
||||
- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto)
|
||||
[`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
|
||||
[`CryptoKey`](https://developer.mozilla.org/en-US/docs/Web/API/CryptoKey)
|
||||
|
||||
---
|
||||
|
||||
- Debugging
|
||||
|
||||
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
|
||||
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
|
||||
[`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
|
||||
|
||||
---
|
||||
|
||||
@@ -79,7 +99,9 @@ The following Web APIs are partially or completely supported.
|
||||
---
|
||||
|
||||
- User interaction
|
||||
- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert) [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)
|
||||
- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert)
|
||||
[`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm)
|
||||
[`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)
|
||||
|
||||
<!-- - Blocking. Prints the alert message to terminal and awaits `[ENTER]` before proceeding. -->
|
||||
<!-- - Blocking. Prints confirmation message and awaits `[y/N]` input from user. Returns `true` if user entered `y` or `Y`, `false` otherwise.
|
||||
@@ -94,7 +116,10 @@ The following Web APIs are partially or completely supported.
|
||||
|
||||
- Events
|
||||
- [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
|
||||
[`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event) [`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent) [`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent) [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
|
||||
[`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event)
|
||||
[`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent)
|
||||
[`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent)
|
||||
[`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -196,3 +196,41 @@ As of Bun v1.0.19, Bun automatically resolves the `specifier` argument to `mock.
|
||||
After resolution, the mocked module is stored in the ES Module registry **and** the CommonJS require cache. This means that you can use `import` and `require` interchangeably for mocked modules.
|
||||
|
||||
The callback function is called lazily, only if the module is imported or required. This means that you can use `mock.module()` to mock modules that don't exist yet, and it means that you can use `mock.module()` to mock modules that are imported by other modules.
|
||||
|
||||
## Restore all function mocks to their original values with `mock.restore()`
|
||||
|
||||
Instead of manually restoring each mock individually with `mockFn.mockRestore()`, restore all mocks with one command by calling `mock.restore()`. Doing so does not reset the value of modules overridden with `mock.module()`.
|
||||
|
||||
Using `mock.restore()` can reduce the amount of code in your tests by adding it to `afterEach` blocks in each test file or even in your [test preload code](https://bun.sh/docs/runtime/bunfig#test-preload).
|
||||
|
||||
```ts
|
||||
import { expect, mock, spyOn, test } from "bun:test";
|
||||
|
||||
import * as fooModule from './foo.ts';
|
||||
import * as barModule from './bar.ts';
|
||||
import * as bazModule from './baz.ts';
|
||||
|
||||
test('foo, bar, baz', () => {
|
||||
const fooSpy = spyOn(fooModule, 'foo');
|
||||
const barSpy = spyOn(barModule, 'bar');
|
||||
const bazSpy = spyOn(bazModule, 'baz');
|
||||
|
||||
expect(fooSpy).toBe('foo');
|
||||
expect(barSpy).toBe('bar');
|
||||
expect(bazSpy).toBe('baz');
|
||||
|
||||
fooSpy.mockImplementation(() => 42);
|
||||
barSpy.mockImplementation(() => 43);
|
||||
bazSpy.mockImplementation(() => 44);
|
||||
|
||||
expect(fooSpy).toBe(42);
|
||||
expect(barSpy).toBe(43);
|
||||
expect(bazSpy).toBe(44);
|
||||
|
||||
mock.restore();
|
||||
|
||||
expect(fooSpy).toBe('foo');
|
||||
expect(barSpy).toBe('bar');
|
||||
expect(bazSpy).toBe('baz');
|
||||
});
|
||||
```
|
||||
|
||||
@@ -182,7 +182,7 @@ pub fn main() anyerror!void {
|
||||
|
||||
try channel.buffer.ensureTotalCapacity(1);
|
||||
|
||||
try HTTPThread.init();
|
||||
HTTPThread.init();
|
||||
|
||||
var ctx = try default_allocator.create(HTTP.HTTPChannelContext);
|
||||
ctx.* = .{
|
||||
@@ -195,7 +195,6 @@ pub fn main() anyerror!void {
|
||||
args.headers_buf,
|
||||
response_body_string,
|
||||
args.body,
|
||||
0,
|
||||
HTTP.FetchRedirect.follow,
|
||||
),
|
||||
};
|
||||
|
||||
@@ -31,7 +31,6 @@ const params = [_]clap.Param(clap.Help){
|
||||
clap.parseParam("-b, --body <STR> HTTP request body as a string") catch unreachable,
|
||||
clap.parseParam("-f, --file <STR> File path to load as body") catch unreachable,
|
||||
clap.parseParam("-n, --count <INT> How many runs? Default 10") catch unreachable,
|
||||
clap.parseParam("-t, --timeout <INT> Max duration per request") catch unreachable,
|
||||
clap.parseParam("-r, --retry <INT> Max retry count") catch unreachable,
|
||||
clap.parseParam("--no-gzip Disable gzip") catch unreachable,
|
||||
clap.parseParam("--no-deflate Disable deflate") catch unreachable,
|
||||
@@ -75,7 +74,6 @@ pub const Arguments = struct {
|
||||
body: string = "",
|
||||
turbo: bool = false,
|
||||
count: usize = 10,
|
||||
timeout: usize = 0,
|
||||
repeat: usize = 0,
|
||||
concurrency: u16 = 32,
|
||||
|
||||
@@ -165,10 +163,6 @@ pub const Arguments = struct {
|
||||
// .keep_alive = !args.flag("--no-keep-alive"),
|
||||
.concurrency = std.fmt.parseInt(u16, args.option("--max-concurrency") orelse "32", 10) catch 32,
|
||||
.turbo = args.flag("--turbo"),
|
||||
.timeout = std.fmt.parseInt(usize, args.option("--timeout") orelse "0", 10) catch |err| {
|
||||
Output.prettyErrorln("<r><red>{s}<r> parsing timeout", .{@errorName(err)});
|
||||
Global.exit(1);
|
||||
},
|
||||
.count = std.fmt.parseInt(usize, args.option("--count") orelse "10", 10) catch |err| {
|
||||
Output.prettyErrorln("<r><red>{s}<r> parsing count", .{@errorName(err)});
|
||||
Global.exit(1);
|
||||
@@ -225,7 +219,6 @@ pub fn main() anyerror!void {
|
||||
args.headers_buf,
|
||||
response_body,
|
||||
"",
|
||||
args.timeout,
|
||||
),
|
||||
};
|
||||
ctx.http.client.verbose = args.verbose;
|
||||
|
||||
37
package.json
37
package.json
@@ -4,24 +4,22 @@
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
"dependencies": {
|
||||
"@vscode/debugadapter": "^1.61.0",
|
||||
"esbuild": "^0.17.15",
|
||||
"eslint": "^8.20.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"mitata": "^0.1.3",
|
||||
"devDependencies": {
|
||||
"@vscode/debugadapter": "^1.65.0",
|
||||
"esbuild": "^0.21.4",
|
||||
"eslint": "^9.4.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
"prettier": "^3.2.5",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"source-map-js": "^1.0.2",
|
||||
"typescript": "^5.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.1.2",
|
||||
"@types/react": "^18.0.25",
|
||||
"@typescript-eslint/eslint-plugin": "^5.31.0",
|
||||
"@typescript-eslint/parser": "^5.31.0"
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "^5.4.5",
|
||||
"@types/bun": "^1.1.3",
|
||||
"@types/react": "^18.3.3",
|
||||
"@typescript-eslint/eslint-plugin": "^7.11.0",
|
||||
"@typescript-eslint/parser": "^7.11.0"
|
||||
},
|
||||
"resolutions": {
|
||||
"bun-types": "workspace:packages/bun-types"
|
||||
@@ -34,6 +32,7 @@
|
||||
"build:tidy": "BUN_SILENT=1 cmake --log-level=WARNING . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout} && BUN_SILENT=1 ninja -Cbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout}",
|
||||
"build:tidy-extra": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY_EXTRA=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy && ninja -Cbuild-tidy",
|
||||
"build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:release:local": "cmake . -DCMAKE_BUILD_TYPE=Release -DWEBKIT_DIR=$(pwd)/src/bun.js/WebKit/WebKitBuild/Release -GNinja -Bbuild-release-local && ninja -Cbuild-release-local",
|
||||
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
|
||||
"build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
|
||||
@@ -43,10 +42,12 @@
|
||||
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
|
||||
"lint": "eslint './**/*.d.ts' --cache",
|
||||
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
|
||||
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
|
||||
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun",
|
||||
"test": "node scripts/runner.node.mjs ./build/bun-debug",
|
||||
"test:release": "node scripts/runner.node.mjs ./build-release/bun",
|
||||
"banned": "bun packages/bun-internal-test/src/linter.ts",
|
||||
"zig-check": ".cache/zig/zig.exe build check --summary new",
|
||||
"zig-check-all": ".cache/zig/zig.exe build check-all --summary new",
|
||||
"zig-check-windows": ".cache/zig/zig.exe build check-windows --summary new",
|
||||
"zig": ".cache/zig/zig.exe "
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,9 +5,13 @@
|
||||
"std.debug.assert": "Use bun.assert instead",
|
||||
"std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
|
||||
"std.debug.print": "Don't let this be committed",
|
||||
"std.mem.indexOfAny": "Use bun.strings.indexAny or bun.strings.indexAnyComptime",
|
||||
"std.mem.indexOfAny(u8": "Use bun.strings.indexOfAny",
|
||||
"undefined != ": "This is by definition Undefined Behavior.",
|
||||
"undefined == ": "This is by definition Undefined Behavior.",
|
||||
"bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
|
||||
"std.StringArrayHashMapUnmanaged(": "bun.StringArrayHashMapUnmanaged has a faster `eql`",
|
||||
"std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
|
||||
"std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
|
||||
"std.StringHashMap(": "bun.StringHashMaphas a faster `eql`",
|
||||
"": ""
|
||||
}
|
||||
|
||||
@@ -19,9 +19,7 @@ for (const [banned, suggestion] of Object.entries(BANNED)) {
|
||||
if (banned.length === 0) continue;
|
||||
// Run git grep to find occurrences of std.debug.assert in .zig files
|
||||
// .nothrow() is here since git will exit with non-zero if no matches are found.
|
||||
let stdout = await $`git grep -n -F "${banned}" "src/**/**.zig" | grep -v -F '//' | grep -v -F bench`
|
||||
.nothrow()
|
||||
.text();
|
||||
let stdout = await $`git grep -n -F "${banned}" "src/**.zig" | grep -v -F '//' | grep -v -F bench`.nothrow().text();
|
||||
|
||||
stdout = stdout.trim();
|
||||
if (stdout.length === 0) continue;
|
||||
|
||||
@@ -290,7 +290,7 @@ function formatBody(body?: string, isBase64Encoded?: boolean): string | null {
|
||||
if (!isBase64Encoded) {
|
||||
return body;
|
||||
}
|
||||
return Buffer.from(body).toString("base64");
|
||||
return Buffer.from(body, "base64").toString("utf8");
|
||||
}
|
||||
|
||||
type HttpEventV1 = {
|
||||
|
||||
@@ -22,10 +22,10 @@ bun upgrade
|
||||
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
|
||||
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
|
||||
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
|
||||
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
|
||||
- [Windows](https://www.npmjs.com/package/@oven/bun-windows-x64)
|
||||
- [Windows (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-windows-x64-baseline)
|
||||
|
||||
### Future Platforms
|
||||
|
||||
- [Windows](https://github.com/oven-sh/bun/issues/43)
|
||||
- Unix-like variants such as FreeBSD, OpenBSD, etc.
|
||||
- Android and iOS
|
||||
|
||||
40
packages/bun-types/bun.d.ts
vendored
40
packages/bun-types/bun.d.ts
vendored
@@ -1455,7 +1455,7 @@ declare module "bun" {
|
||||
* ```js
|
||||
* const {imports, exports} = transpiler.scan(`
|
||||
* import {foo} from "baz";
|
||||
* const hello = "hi!";
|
||||
* export const hello = "hi!";
|
||||
* `);
|
||||
*
|
||||
* console.log(imports); // ["baz"]
|
||||
@@ -1516,6 +1516,7 @@ declare module "bun" {
|
||||
plugins?: BunPlugin[];
|
||||
// manifest?: boolean; // whether to return manifest
|
||||
external?: string[];
|
||||
packages?: "bundle" | "external";
|
||||
publicPath?: string;
|
||||
define?: Record<string, string>;
|
||||
// origin?: string; // e.g. http://mydomain.com
|
||||
@@ -2968,7 +2969,7 @@ declare module "bun" {
|
||||
* Returns 0 if the versions are equal, 1 if `v1` is greater, or -1 if `v2` is greater.
|
||||
* Throws an error if either version is invalid.
|
||||
*/
|
||||
order(v1: StringLike, v2: StringLike): -1 | 0 | 1;
|
||||
order(this: void, v1: StringLike, v2: StringLike): -1 | 0 | 1;
|
||||
}
|
||||
var semver: Semver;
|
||||
|
||||
@@ -3099,6 +3100,10 @@ declare module "bun" {
|
||||
*/
|
||||
function openInEditor(path: string, options?: EditorOptions): void;
|
||||
|
||||
const fetch: typeof globalThis.fetch & {
|
||||
preconnect(url: string): void;
|
||||
};
|
||||
|
||||
interface EditorOptions {
|
||||
editor?: "vscode" | "subl";
|
||||
line?: number;
|
||||
@@ -3476,6 +3481,13 @@ declare module "bun" {
|
||||
* Filtered data consists mostly of small values with a somewhat random distribution.
|
||||
*/
|
||||
strategy?: number;
|
||||
|
||||
library?: "zlib";
|
||||
}
|
||||
|
||||
interface LibdeflateCompressionOptions {
|
||||
level?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12;
|
||||
library?: "libdeflate";
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -3484,26 +3496,38 @@ declare module "bun" {
|
||||
* @param options Compression options to use
|
||||
* @returns The output buffer with the compressed data
|
||||
*/
|
||||
function deflateSync(data: Uint8Array | string | ArrayBuffer, options?: ZlibCompressionOptions): Uint8Array;
|
||||
function deflateSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
/**
|
||||
* Compresses a chunk of data with `zlib` GZIP algorithm.
|
||||
* @param data The buffer of data to compress
|
||||
* @param options Compression options to use
|
||||
* @returns The output buffer with the compressed data
|
||||
*/
|
||||
function gzipSync(data: Uint8Array | string | ArrayBuffer, options?: ZlibCompressionOptions): Uint8Array;
|
||||
function gzipSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
/**
|
||||
* Decompresses a chunk of data with `zlib` INFLATE algorithm.
|
||||
* @param data The buffer of data to decompress
|
||||
* @returns The output buffer with the decompressed data
|
||||
*/
|
||||
function inflateSync(data: Uint8Array | string | ArrayBuffer): Uint8Array;
|
||||
function inflateSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
/**
|
||||
* Decompresses a chunk of data with `zlib` GUNZIP algorithm.
|
||||
* @param data The buffer of data to decompress
|
||||
* @returns The output buffer with the decompressed data
|
||||
*/
|
||||
function gunzipSync(data: Uint8Array | string | ArrayBuffer): Uint8Array;
|
||||
function gunzipSync(
|
||||
data: Uint8Array | string | ArrayBuffer,
|
||||
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
|
||||
): Uint8Array;
|
||||
|
||||
type Target =
|
||||
/**
|
||||
@@ -3823,7 +3847,7 @@ declare module "bun" {
|
||||
*/
|
||||
const isMainThread: boolean;
|
||||
|
||||
interface Socket<Data = undefined> {
|
||||
interface Socket<Data = undefined> extends Disposable {
|
||||
/**
|
||||
* Write `data` to the socket
|
||||
*
|
||||
@@ -4105,7 +4129,7 @@ declare module "bun" {
|
||||
setMaxSendFragment(size: number): boolean;
|
||||
}
|
||||
|
||||
interface SocketListener<Data = undefined> {
|
||||
interface SocketListener<Data = undefined> extends Disposable {
|
||||
stop(closeActiveConnections?: boolean): void;
|
||||
ref(): void;
|
||||
unref(): void;
|
||||
|
||||
54
packages/bun-types/globals.d.ts
vendored
54
packages/bun-types/globals.d.ts
vendored
@@ -907,26 +907,42 @@ declare global {
|
||||
new (): ShadowRealm;
|
||||
};
|
||||
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
*
|
||||
* @param request Request object
|
||||
* @param init A structured value that contains settings for the fetch() request.
|
||||
*
|
||||
* @returns A promise that resolves to {@link Response} object.
|
||||
*/
|
||||
interface Fetch {
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
*
|
||||
* @param request Request object
|
||||
* @param init A structured value that contains settings for the fetch() request.
|
||||
*
|
||||
* @returns A promise that resolves to {@link Response} object.
|
||||
*/
|
||||
(request: Request, init?: RequestInit): Promise<Response>;
|
||||
|
||||
// tslint:disable-next-line:unified-signatures
|
||||
function fetch(request: Request, init?: RequestInit): Promise<Response>;
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
*
|
||||
* @param url URL string
|
||||
* @param init A structured value that contains settings for the fetch() request.
|
||||
*
|
||||
* @returns A promise that resolves to {@link Response} object.
|
||||
*/
|
||||
function fetch(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
*
|
||||
* @param url URL string
|
||||
* @param init A structured value that contains settings for the fetch() request.
|
||||
*
|
||||
* @returns A promise that resolves to {@link Response} object.
|
||||
*/
|
||||
(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
|
||||
|
||||
(input: string | URL | globalThis.Request, init?: RequestInit): Promise<Response>;
|
||||
|
||||
/**
|
||||
* Start the DNS resolution, TCP connection, and TLS handshake for a request
|
||||
* before the request is actually sent.
|
||||
*
|
||||
* This can reduce the latency of a request when you know there's some
|
||||
* long-running task that will delay the request starting.
|
||||
*
|
||||
* This is a bun-specific API and is not part of the Fetch API specification.
|
||||
*/
|
||||
preconnect(url: string | URL): void;
|
||||
}
|
||||
|
||||
var fetch: Fetch;
|
||||
|
||||
function queueMicrotask(callback: (...args: any[]) => void): void;
|
||||
/**
|
||||
|
||||
35
packages/bun-types/jsc.d.ts
vendored
35
packages/bun-types/jsc.d.ts
vendored
@@ -78,21 +78,7 @@ declare module "bun:jsc" {
|
||||
*/
|
||||
function setTimeZone(timeZone: string): string;
|
||||
|
||||
/**
|
||||
* Run JavaScriptCore's sampling profiler for a particular function
|
||||
*
|
||||
* This is pretty low-level.
|
||||
*
|
||||
* Things to know:
|
||||
* - LLint means "Low Level Interpreter", which is the interpreter that runs before any JIT compilation
|
||||
* - Baseline is the first JIT compilation tier. It's the least optimized, but the fastest to compile
|
||||
* - DFG means "Data Flow Graph", which is the second JIT compilation tier. It has some optimizations, but is slower to compile
|
||||
* - FTL means "Faster Than Light", which is the third JIT compilation tier. It has the most optimizations, but is the slowest to compile
|
||||
*/
|
||||
function profile(
|
||||
callback: CallableFunction,
|
||||
sampleInterval?: number,
|
||||
): {
|
||||
interface SamplingProfile {
|
||||
/**
|
||||
* A formatted summary of the top functions
|
||||
*
|
||||
@@ -183,7 +169,24 @@ declare module "bun:jsc" {
|
||||
* Stack traces of the top functions
|
||||
*/
|
||||
stackTraces: string[];
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Run JavaScriptCore's sampling profiler for a particular function
|
||||
*
|
||||
* This is pretty low-level.
|
||||
*
|
||||
* Things to know:
|
||||
* - LLint means "Low Level Interpreter", which is the interpreter that runs before any JIT compilation
|
||||
* - Baseline is the first JIT compilation tier. It's the least optimized, but the fastest to compile
|
||||
* - DFG means "Data Flow Graph", which is the second JIT compilation tier. It has some optimizations, but is slower to compile
|
||||
* - FTL means "Faster Than Light", which is the third JIT compilation tier. It has the most optimizations, but is the slowest to compile
|
||||
*/
|
||||
function profile<T extends (...args: any[]) => any>(
|
||||
callback: T,
|
||||
sampleInterval?: number,
|
||||
...args: Parameters<T>
|
||||
): ReturnType<T> extends Promise<infer U> ? Promise<SamplingProfile> : SamplingProfile;
|
||||
|
||||
/**
|
||||
* This returns objects which native code has explicitly protected from being
|
||||
|
||||
10
packages/bun-types/sqlite.d.ts
vendored
10
packages/bun-types/sqlite.d.ts
vendored
@@ -36,7 +36,7 @@ declare module "bun:sqlite" {
|
||||
* ```ts
|
||||
* const db = new Database("mydb.sqlite");
|
||||
* db.run("CREATE TABLE foo (bar TEXT)");
|
||||
* db.run("INSERT INTO foo VALUES (?)", "baz");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
|
||||
* console.log(db.query("SELECT * FROM foo").all());
|
||||
* ```
|
||||
*
|
||||
@@ -47,7 +47,7 @@ declare module "bun:sqlite" {
|
||||
* ```ts
|
||||
* const db = new Database(":memory:");
|
||||
* db.run("CREATE TABLE foo (bar TEXT)");
|
||||
* db.run("INSERT INTO foo VALUES (?)", "hiiiiii");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["hiiiiii"]);
|
||||
* console.log(db.query("SELECT * FROM foo").all());
|
||||
* ```
|
||||
*
|
||||
@@ -158,7 +158,7 @@ declare module "bun:sqlite" {
|
||||
* @example
|
||||
* ```ts
|
||||
* db.run("CREATE TABLE foo (bar TEXT)");
|
||||
* db.run("INSERT INTO foo VALUES (?)", "baz");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
|
||||
* ```
|
||||
*
|
||||
* Useful for queries like:
|
||||
@@ -268,9 +268,9 @@ declare module "bun:sqlite" {
|
||||
* @example
|
||||
* ```ts
|
||||
* db.run("CREATE TABLE foo (bar TEXT)");
|
||||
* db.run("INSERT INTO foo VALUES (?)", "baz");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
|
||||
* db.run("BEGIN");
|
||||
* db.run("INSERT INTO foo VALUES (?)", "qux");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["qux"]);
|
||||
* console.log(db.inTransaction());
|
||||
* ```
|
||||
*/
|
||||
|
||||
@@ -26,6 +26,10 @@
|
||||
#include <stdlib.h>
|
||||
|
||||
#ifndef _WIN32
|
||||
// Necessary for the stdint include
|
||||
#ifndef _GNU_SOURCE
|
||||
#define _GNU_SOURCE
|
||||
#endif
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <netinet/in.h>
|
||||
@@ -42,6 +46,7 @@
|
||||
#define HAS_MSGX
|
||||
#endif
|
||||
|
||||
|
||||
/* We need to emulate sendmmsg, recvmmsg on platform who don't have it */
|
||||
int bsd_sendmmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct udp_sendbuf* sendbuf, int flags) {
|
||||
#if defined(_WIN32)// || defined(__APPLE__)
|
||||
@@ -397,7 +402,9 @@ int bsd_addr_get_port(struct bsd_addr_t *addr) {
|
||||
// called by dispatch_ready_poll
|
||||
LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd_addr_t *addr) {
|
||||
LIBUS_SOCKET_DESCRIPTOR accepted_fd;
|
||||
addr->len = sizeof(addr->mem);
|
||||
|
||||
while (1) {
|
||||
addr->len = sizeof(addr->mem);
|
||||
|
||||
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
|
||||
// Linux, FreeBSD
|
||||
@@ -405,12 +412,18 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
|
||||
#else
|
||||
// Windows, OS X
|
||||
accepted_fd = accept(fd, (struct sockaddr *) addr, &addr->len);
|
||||
|
||||
#endif
|
||||
|
||||
/* We cannot rely on addr since it is not initialized if failed */
|
||||
if (accepted_fd == LIBUS_SOCKET_ERROR) {
|
||||
return LIBUS_SOCKET_ERROR;
|
||||
if (UNLIKELY(IS_EINTR(accepted_fd))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
/* We cannot rely on addr since it is not initialized if failed */
|
||||
if (accepted_fd == LIBUS_SOCKET_ERROR) {
|
||||
return LIBUS_SOCKET_ERROR;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
internal_finalize_bsd_addr(addr);
|
||||
@@ -423,14 +436,22 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
|
||||
#endif
|
||||
}
|
||||
|
||||
int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
|
||||
return recv(fd, buf, length, flags);
|
||||
ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
|
||||
while (1) {
|
||||
ssize_t ret = recv(fd, buf, length, flags);
|
||||
|
||||
if (UNLIKELY(IS_EINTR(ret))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
#if !defined(_WIN32)
|
||||
#include <sys/uio.h>
|
||||
|
||||
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
|
||||
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
|
||||
struct iovec chunks[2];
|
||||
|
||||
chunks[0].iov_base = (char *)header;
|
||||
@@ -438,13 +459,21 @@ int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length
|
||||
chunks[1].iov_base = (char *)payload;
|
||||
chunks[1].iov_len = payload_length;
|
||||
|
||||
return writev(fd, chunks, 2);
|
||||
while (1) {
|
||||
ssize_t written = writev(fd, chunks, 2);
|
||||
|
||||
if (UNLIKELY(IS_EINTR(written))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return written;
|
||||
}
|
||||
}
|
||||
#else
|
||||
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
|
||||
int written = bsd_send(fd, header, header_length, 0);
|
||||
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) {
|
||||
ssize_t written = bsd_send(fd, header, header_length, 0);
|
||||
if (written == header_length) {
|
||||
int second_write = bsd_send(fd, payload, payload_length, 0);
|
||||
ssize_t second_write = bsd_send(fd, payload, payload_length, 0);
|
||||
if (second_write > 0) {
|
||||
written += second_write;
|
||||
}
|
||||
@@ -453,26 +482,28 @@ int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length
|
||||
}
|
||||
#endif
|
||||
|
||||
int bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {
|
||||
|
||||
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) {
|
||||
while (1) {
|
||||
// MSG_MORE (Linux), MSG_PARTIAL (Windows), TCP_NOPUSH (BSD)
|
||||
|
||||
#ifndef MSG_NOSIGNAL
|
||||
#define MSG_NOSIGNAL 0
|
||||
#endif
|
||||
|
||||
#ifdef MSG_MORE
|
||||
#ifdef MSG_MORE
|
||||
// for Linux we do not want signals
|
||||
ssize_t rc = send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
|
||||
#else
|
||||
// use TCP_NOPUSH
|
||||
ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);
|
||||
#endif
|
||||
|
||||
// for Linux we do not want signals
|
||||
return send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT);
|
||||
if (UNLIKELY(IS_EINTR(rc))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
// use TCP_NOPUSH
|
||||
|
||||
return send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT);
|
||||
|
||||
#endif
|
||||
return rc;
|
||||
}
|
||||
}
|
||||
|
||||
int bsd_would_block() {
|
||||
@@ -483,6 +514,23 @@ int bsd_would_block() {
|
||||
#endif
|
||||
}
|
||||
|
||||
static int us_internal_bind_and_listen(LIBUS_SOCKET_DESCRIPTOR listenFd, struct sockaddr *listenAddr, socklen_t listenAddrLength, int backlog) {
|
||||
int result;
|
||||
do
|
||||
result = bind(listenFd, listenAddr, listenAddrLength);
|
||||
while (IS_EINTR(result));
|
||||
|
||||
if (result == -1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
do
|
||||
result = listen(listenFd, backlog);
|
||||
while (IS_EINTR(result));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd(
|
||||
LIBUS_SOCKET_DESCRIPTOR listenFd,
|
||||
struct addrinfo *listenAddr,
|
||||
@@ -512,7 +560,7 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
|
||||
setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
|
||||
#endif
|
||||
|
||||
if (bind(listenFd, listenAddr->ai_addr, (socklen_t) listenAddr->ai_addrlen) || listen(listenFd, 512)) {
|
||||
if (us_internal_bind_and_listen(listenFd, listenAddr->ai_addr, (socklen_t) listenAddr->ai_addrlen, 512)) {
|
||||
return LIBUS_SOCKET_ERROR;
|
||||
}
|
||||
|
||||
@@ -690,7 +738,7 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
|
||||
unlink(path);
|
||||
#endif
|
||||
|
||||
if (bind(listenFd, (struct sockaddr *)server_address, addrlen) || listen(listenFd, 512)) {
|
||||
if (us_internal_bind_and_listen(listenFd, (struct sockaddr *) server_address, (socklen_t) addrlen, 512)) {
|
||||
#if defined(_WIN32)
|
||||
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
|
||||
#endif
|
||||
@@ -838,7 +886,7 @@ int bsd_connect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd, const char *host, int por
|
||||
}
|
||||
|
||||
freeaddrinfo(result);
|
||||
return LIBUS_SOCKET_ERROR;
|
||||
return (int)LIBUS_SOCKET_ERROR;
|
||||
}
|
||||
|
||||
int bsd_disconnect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd) {
|
||||
@@ -925,7 +973,7 @@ static int bsd_do_connect_raw(LIBUS_SOCKET_DESCRIPTOR fd, struct sockaddr *addr,
|
||||
do {
|
||||
errno = 0;
|
||||
r = connect(fd, (struct sockaddr *)addr, namelen);
|
||||
} while (r == -1 && errno == EINTR);
|
||||
} while (IS_EINTR(r));
|
||||
|
||||
// connect() can return -1 with an errno of 0.
|
||||
// the errno is the correct one in that case.
|
||||
|
||||
@@ -15,18 +15,18 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include "libusockets.h"
|
||||
#include "internal/internal.h"
|
||||
#include "libusockets.h"
|
||||
#include <errno.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <errno.h>
|
||||
|
||||
#ifndef _WIN32
|
||||
#include <arpa/inet.h>
|
||||
#endif
|
||||
|
||||
#define CONCURRENT_CONNECTIONS 2
|
||||
|
||||
// clang-format off
|
||||
int default_is_low_prio_handler(struct us_socket_t *s) {
|
||||
return 0;
|
||||
}
|
||||
@@ -44,7 +44,7 @@ int us_raw_root_certs(struct us_cert_string_t**out){
|
||||
void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) {
|
||||
/* us_listen_socket_t extends us_socket_t so we close in similar ways */
|
||||
if (!us_socket_is_closed(0, &ls->s)) {
|
||||
us_internal_socket_context_unlink_listen_socket(ls->s.context, ls);
|
||||
us_internal_socket_context_unlink_listen_socket(ssl, ls->s.context, ls);
|
||||
us_poll_stop((struct us_poll_t *) &ls->s, ls->s.context->loop);
|
||||
bsd_close_socket(us_poll_fd((struct us_poll_t *) &ls->s));
|
||||
|
||||
@@ -72,12 +72,12 @@ void us_socket_context_close(int ssl, struct us_socket_context_t *context) {
|
||||
struct us_socket_t *s = context->head_sockets;
|
||||
while (s) {
|
||||
struct us_socket_t *nextS = s->next;
|
||||
us_socket_close(ssl, s, 0, 0);
|
||||
us_socket_close(ssl, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, 0);
|
||||
s = nextS;
|
||||
}
|
||||
}
|
||||
|
||||
void us_internal_socket_context_unlink_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
void us_internal_socket_context_unlink_listen_socket(int ssl, struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
/* We have to properly update the iterator used to sweep sockets for timeouts */
|
||||
if (ls == (struct us_listen_socket_t *) context->iterator) {
|
||||
context->iterator = ls->s.next;
|
||||
@@ -95,9 +95,10 @@ void us_internal_socket_context_unlink_listen_socket(struct us_socket_context_t
|
||||
ls->s.next->prev = ls->s.prev;
|
||||
}
|
||||
}
|
||||
us_socket_context_unref(ssl, context);
|
||||
}
|
||||
|
||||
void us_internal_socket_context_unlink_socket(struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
void us_internal_socket_context_unlink_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
/* We have to properly update the iterator used to sweep sockets for timeouts */
|
||||
if (s == context->iterator) {
|
||||
context->iterator = s->next;
|
||||
@@ -115,6 +116,7 @@ void us_internal_socket_context_unlink_socket(struct us_socket_context_t *contex
|
||||
s->next->prev = s->prev;
|
||||
}
|
||||
}
|
||||
us_socket_context_unref(ssl, context);
|
||||
}
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
@@ -126,6 +128,7 @@ void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *c
|
||||
context->head_listen_sockets->s.prev = &ls->s;
|
||||
}
|
||||
context->head_listen_sockets = ls;
|
||||
context->ref_count++;
|
||||
}
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
@@ -137,6 +140,7 @@ void us_internal_socket_context_link_socket(struct us_socket_context_t *context,
|
||||
context->head_sockets->prev = s;
|
||||
}
|
||||
context->head_sockets = s;
|
||||
context->ref_count++;
|
||||
}
|
||||
|
||||
struct us_loop_t *us_socket_context_loop(int ssl, struct us_socket_context_t *context) {
|
||||
@@ -231,6 +235,7 @@ struct us_socket_context_t *us_create_socket_context(int ssl, struct us_loop_t *
|
||||
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
|
||||
context->loop = loop;
|
||||
context->is_low_prio = default_is_low_prio_handler;
|
||||
context->ref_count = 1;
|
||||
|
||||
us_internal_loop_link(loop, context);
|
||||
|
||||
@@ -252,6 +257,7 @@ struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop
|
||||
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
|
||||
context->loop = loop;
|
||||
context->is_low_prio = default_is_low_prio_handler;
|
||||
context->ref_count = 1;
|
||||
|
||||
us_internal_loop_link(loop, context);
|
||||
|
||||
@@ -272,7 +278,8 @@ struct us_bun_verify_error_t us_socket_verify_error(int ssl, struct us_socket_t
|
||||
}
|
||||
|
||||
|
||||
void us_socket_context_free(int ssl, struct us_socket_context_t *context) {
|
||||
|
||||
void us_internal_socket_context_free(int ssl, struct us_socket_context_t *context) {
|
||||
#ifndef LIBUS_NO_SSL
|
||||
if (ssl) {
|
||||
/* This function will call us again with SSL=false */
|
||||
@@ -285,7 +292,23 @@ void us_socket_context_free(int ssl, struct us_socket_context_t *context) {
|
||||
* This is the opposite order compared to when creating the context - SSL code is cleaning up before non-SSL */
|
||||
|
||||
us_internal_loop_unlink(context->loop, context);
|
||||
us_free(context);
|
||||
/* Link this context to the close-list and let it be deleted after this iteration */
|
||||
context->next = context->loop->data.closed_context_head;
|
||||
context->loop->data.closed_context_head = context;
|
||||
}
|
||||
|
||||
void us_socket_context_ref(int ssl, struct us_socket_context_t *context) {
|
||||
context->ref_count++;
|
||||
}
|
||||
|
||||
void us_socket_context_unref(int ssl, struct us_socket_context_t *context) {
|
||||
if (--context->ref_count == 0) {
|
||||
us_internal_socket_context_free(ssl, context);
|
||||
}
|
||||
}
|
||||
|
||||
void us_socket_context_free(int ssl, struct us_socket_context_t *context) {
|
||||
us_socket_context_unref(ssl, context);
|
||||
}
|
||||
|
||||
struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_context_t *context, const char *host, int port, int options, int socket_ext_size) {
|
||||
@@ -709,7 +732,7 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
|
||||
|
||||
if (s->low_prio_state != 1) {
|
||||
/* This properly updates the iterator if in on_timeout */
|
||||
us_internal_socket_context_unlink_socket(s->context, s);
|
||||
us_internal_socket_context_unlink_socket(ssl, s->context, s);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -14,8 +14,14 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#if (defined(LIBUS_USE_OPENSSL) || defined(LIBUS_USE_WOLFSSL))
|
||||
|
||||
|
||||
#include "internal/internal.h"
|
||||
#include "libusockets.h"
|
||||
#include <string.h>
|
||||
|
||||
/* These are in sni_tree.cpp */
|
||||
void *sni_new();
|
||||
void sni_free(void *sni, void (*cb)(void *));
|
||||
@@ -23,10 +29,6 @@ int sni_add(void *sni, const char *hostname, void *user);
|
||||
void *sni_remove(void *sni, const char *hostname);
|
||||
void *sni_find(void *sni, const char *hostname);
|
||||
|
||||
#include "internal/internal.h"
|
||||
#include "libusockets.h"
|
||||
#include <string.h>
|
||||
|
||||
/* This module contains the entire OpenSSL implementation
|
||||
* of the SSL socket and socket context interfaces. */
|
||||
#ifdef LIBUS_USE_OPENSSL
|
||||
@@ -71,10 +73,6 @@ struct us_internal_ssl_socket_context_t {
|
||||
// socket context
|
||||
SSL_CTX *ssl_context;
|
||||
int is_parent;
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
unsigned int client_renegotiation_limit;
|
||||
unsigned int client_renegotiation_window;
|
||||
#endif
|
||||
/* These decorate the base implementation */
|
||||
struct us_internal_ssl_socket_t *(*on_open)(struct us_internal_ssl_socket_t *,
|
||||
int is_client, char *ip,
|
||||
@@ -108,15 +106,9 @@ enum {
|
||||
struct us_internal_ssl_socket_t {
|
||||
struct us_socket_t s;
|
||||
SSL *ssl; // this _must_ be the first member after s
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
unsigned int client_pending_renegotiations;
|
||||
uint64_t last_ssl_renegotiation;
|
||||
unsigned int is_client : 1;
|
||||
#endif
|
||||
unsigned int ssl_write_wants_read : 1; // we use this for now
|
||||
unsigned int ssl_read_wants_write : 1;
|
||||
unsigned int handshake_state : 2;
|
||||
unsigned int received_ssl_shutdown : 1;
|
||||
};
|
||||
|
||||
int passphrase_cb(char *buf, int size, int rwflag, void *u) {
|
||||
@@ -194,16 +186,9 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
(struct loop_ssl_data *)loop->data.ssl_data;
|
||||
|
||||
s->ssl = SSL_new(context->ssl_context);
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
s->client_pending_renegotiations = context->client_renegotiation_limit;
|
||||
s->last_ssl_renegotiation = 0;
|
||||
s->is_client = is_client ? 1 : 0;
|
||||
|
||||
#endif
|
||||
s->ssl_write_wants_read = 0;
|
||||
s->ssl_read_wants_write = 0;
|
||||
s->handshake_state = HANDSHAKE_PENDING;
|
||||
s->received_ssl_shutdown = 0;
|
||||
|
||||
SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
|
||||
// if we allow renegotiation, we need to set the mode here
|
||||
@@ -213,24 +198,18 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
// this can be a DoS vector for servers, so we enable it using a limit
|
||||
// we do not use ssl_renegotiate_freely, since ssl_renegotiate_explicit is
|
||||
// more performant when using BoringSSL
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
if (context->client_renegotiation_limit) {
|
||||
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_explicit);
|
||||
} else {
|
||||
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_never);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
BIO_up_ref(loop_ssl_data->shared_rbio);
|
||||
BIO_up_ref(loop_ssl_data->shared_wbio);
|
||||
|
||||
if (is_client) {
|
||||
#if ALLOW_SERVER_RENEGOTIATION == 0
|
||||
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_explicit);
|
||||
#endif
|
||||
SSL_set_connect_state(s->ssl);
|
||||
} else {
|
||||
SSL_set_accept_state(s->ssl);
|
||||
// we do not allow renegotiation on the server side (should be the default for BoringSSL, but we set to make openssl compatible)
|
||||
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_never);
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *result =
|
||||
@@ -246,6 +225,36 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
return result;
|
||||
}
|
||||
|
||||
/// @brief Complete the shutdown or do a fast shutdown when needed, this should only be called before closing the socket
|
||||
/// @param s
|
||||
void us_internal_handle_shutdown(struct us_internal_ssl_socket_t *s, int force_fast_shutdown) {
|
||||
// if we are already shutdown or in the middle of a handshake we dont need to do anything
|
||||
if(!s->ssl || us_socket_is_shut_down(0, &s->s)) return;
|
||||
|
||||
// we are closing the socket but did not sent a shutdown yet
|
||||
int state = SSL_get_shutdown(s->ssl);
|
||||
int sent_shutdown = state & SSL_SENT_SHUTDOWN;
|
||||
int received_shutdown = state & SSL_RECEIVED_SHUTDOWN;
|
||||
// if we are missing a shutdown call, we need to do a fast shutdown here
|
||||
if(!sent_shutdown || !received_shutdown) {
|
||||
// Zero means that we should wait for the peer to close the connection
|
||||
// but we are already closing the connection so we do a fast shutdown here
|
||||
int ret = SSL_shutdown(s->ssl);
|
||||
if(ret == 0 && force_fast_shutdown) {
|
||||
// do a fast shutdown (dont wait for peer)
|
||||
ret = SSL_shutdown(s->ssl);
|
||||
}
|
||||
if(ret < 0) {
|
||||
// we got some error here, but we dont care about it, we are closing the socket
|
||||
int err = SSL_get_error(s->ssl, ret);
|
||||
if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
|
||||
// clear
|
||||
ERR_clear_error();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void us_internal_on_ssl_handshake(
|
||||
struct us_internal_ssl_socket_context_t *context,
|
||||
void (*on_handshake)(struct us_internal_ssl_socket_t *, int success,
|
||||
@@ -259,6 +268,8 @@ void us_internal_on_ssl_handshake(
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
void *reason) {
|
||||
|
||||
|
||||
if (s->handshake_state != HANDSHAKE_COMPLETED) {
|
||||
// if we have some pending handshake we cancel it and try to check the
|
||||
// latest handshake error this way we will always call on_handshake with the
|
||||
@@ -269,8 +280,14 @@ us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
us_internal_trigger_handshake_callback(s, 0);
|
||||
}
|
||||
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_close(
|
||||
0, (struct us_socket_t *)s, code, reason);
|
||||
// if we are in the middle of a close_notify we need to finish it (code != 0 forces a fast shutdown)
|
||||
us_internal_handle_shutdown(s, code != 0);
|
||||
|
||||
// only close the socket if we are not in the middle of a handshake
|
||||
if(!s->ssl || SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_close(0, (struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
void us_internal_trigger_handshake_callback(struct us_internal_ssl_socket_t *s,
|
||||
@@ -292,26 +309,7 @@ int us_internal_ssl_renegotiate(struct us_internal_ssl_socket_t *s) {
|
||||
// if is a server and we have no pending renegotiation we can check
|
||||
// the limits
|
||||
s->handshake_state = HANDSHAKE_RENEGOTIATION_PENDING;
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
if (!s->is_client && !SSL_renegotiate_pending(s->ssl)) {
|
||||
uint64_t now = time(NULL);
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
// if is not the first time we negotiate and we are outside the time
|
||||
// window, reset the limits
|
||||
if (s->last_ssl_renegotiation && (now - s->last_ssl_renegotiation) >=
|
||||
context->client_renegotiation_window) {
|
||||
// reset the limits
|
||||
s->client_pending_renegotiations = context->client_renegotiation_limit;
|
||||
}
|
||||
// if we have no more renegotiations, we should close the connection
|
||||
if (s->client_pending_renegotiations == 0) {
|
||||
return 0;
|
||||
}
|
||||
s->last_ssl_renegotiation = now;
|
||||
s->client_pending_renegotiations--;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!SSL_renegotiate(s->ssl)) {
|
||||
// we failed to renegotiate
|
||||
us_internal_trigger_handshake_callback(s, 0);
|
||||
@@ -347,7 +345,6 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
int result = SSL_do_handshake(s->ssl);
|
||||
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->received_ssl_shutdown = 1;
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return;
|
||||
}
|
||||
@@ -387,16 +384,15 @@ ssl_on_close(struct us_internal_ssl_socket_t *s, int code, void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
SSL_free(s->ssl);
|
||||
|
||||
return context->on_close(s, code, reason);
|
||||
struct us_internal_ssl_socket_t * ret = context->on_close(s, code, reason);
|
||||
SSL_free(s->ssl); // free SSL after on_close
|
||||
s->ssl = NULL; // set to NULL
|
||||
return ret;
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
ssl_on_end(struct us_internal_ssl_socket_t *s) {
|
||||
// whatever state we are in, a TCP FIN is always an answered shutdown
|
||||
|
||||
/* Todo: this should report CLEANLY SHUTDOWN as reason */
|
||||
return us_internal_ssl_socket_close(s, 0, NULL);
|
||||
}
|
||||
|
||||
@@ -420,31 +416,13 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
loop_ssl_data->ssl_socket = &s->s;
|
||||
loop_ssl_data->msg_more = 0;
|
||||
|
||||
if (us_socket_is_closed(0, &s->s) || s->received_ssl_shutdown) {
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (us_internal_ssl_socket_is_shut_down(s)) {
|
||||
|
||||
int ret = 0;
|
||||
if ((ret = SSL_shutdown(s->ssl)) == 1) {
|
||||
// two phase shutdown is complete here
|
||||
|
||||
/* Todo: this should also report some kind of clean shutdown */
|
||||
return us_internal_ssl_socket_close(s, 0, NULL);
|
||||
} else if (ret < 0) {
|
||||
|
||||
int err = SSL_get_error(s->ssl, ret);
|
||||
|
||||
if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
|
||||
// we need to clear the error queue in case these added to the thread
|
||||
// local queue
|
||||
ERR_clear_error();
|
||||
}
|
||||
}
|
||||
|
||||
// no further processing of data when in shutdown state
|
||||
return s;
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// bug checking: this loop needs a lot of attention and clean-ups and
|
||||
@@ -452,17 +430,12 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
int read = 0;
|
||||
restart:
|
||||
// read until shutdown
|
||||
while (!s->received_ssl_shutdown) {
|
||||
while (1) {
|
||||
int just_read = SSL_read(s->ssl,
|
||||
loop_ssl_data->ssl_read_output +
|
||||
LIBUS_RECV_BUFFER_PADDING + read,
|
||||
LIBUS_RECV_BUFFER_LENGTH - read);
|
||||
// we need to check if we received a shutdown here
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->received_ssl_shutdown = 1;
|
||||
// we will only close after we handle the data and errors
|
||||
}
|
||||
|
||||
|
||||
if (just_read <= 0) {
|
||||
int err = SSL_get_error(s->ssl, just_read);
|
||||
// as far as I know these are the only errors we want to handle
|
||||
@@ -477,8 +450,9 @@ restart:
|
||||
// clean and close renegotiation failed
|
||||
err = SSL_ERROR_SSL;
|
||||
} else if (err == SSL_ERROR_ZERO_RETURN) {
|
||||
// zero return can be EOF/FIN, if we have data just signal on_data and
|
||||
// close
|
||||
// Remotely-Initiated Shutdown
|
||||
// See: https://www.openssl.org/docs/manmaster/man3/SSL_shutdown.html
|
||||
|
||||
if (read) {
|
||||
context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(
|
||||
@@ -488,11 +462,12 @@ restart:
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
|
||||
read);
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
return NULL; // stop processing data
|
||||
}
|
||||
}
|
||||
// terminate connection here
|
||||
return us_internal_ssl_socket_close(s, 0, NULL);
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return NULL; // stop processing data
|
||||
}
|
||||
|
||||
if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
|
||||
@@ -501,7 +476,8 @@ restart:
|
||||
}
|
||||
|
||||
// terminate connection here
|
||||
return us_internal_ssl_socket_close(s, 0, NULL);
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return NULL; // stop processing data
|
||||
} else {
|
||||
// emit the data we have and exit
|
||||
|
||||
@@ -527,7 +503,7 @@ restart:
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
|
||||
read);
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
return NULL; // stop processing data
|
||||
}
|
||||
|
||||
break;
|
||||
@@ -550,19 +526,13 @@ restart:
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING, read);
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
read = 0;
|
||||
goto restart;
|
||||
}
|
||||
}
|
||||
|
||||
// we received the shutdown after reading so we close
|
||||
if (s->received_ssl_shutdown) {
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return NULL;
|
||||
}
|
||||
// trigger writable if we failed last write with want read
|
||||
if (s->ssl_write_wants_read) {
|
||||
s->ssl_write_wants_read = 0;
|
||||
@@ -576,7 +546,7 @@ restart:
|
||||
&s->s); // cast here!
|
||||
// if we are closed here, then exit
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1032,7 +1002,7 @@ long us_internal_verify_peer_certificate( // NOLINT(runtime/int)
|
||||
|
||||
struct us_bun_verify_error_t
|
||||
us_internal_verify_error(struct us_internal_ssl_socket_t *s) {
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
|
||||
if (!s->ssl || us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
|
||||
return (struct us_bun_verify_error_t){
|
||||
.error = 0, .code = NULL, .reason = NULL};
|
||||
}
|
||||
@@ -1317,10 +1287,6 @@ void us_bun_internal_ssl_socket_context_add_server_name(
|
||||
|
||||
/* We do not want to hold any nullptr's in our SNI tree */
|
||||
if (ssl_context) {
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
context->client_renegotiation_limit = options.client_renegotiation_limit;
|
||||
context->client_renegotiation_window = options.client_renegotiation_window;
|
||||
#endif
|
||||
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
|
||||
/* If we already had that name, ignore */
|
||||
free_ssl_context(ssl_context);
|
||||
@@ -1469,10 +1435,6 @@ us_internal_bun_create_ssl_socket_context(
|
||||
|
||||
context->on_handshake = NULL;
|
||||
context->handshake_data = NULL;
|
||||
#if ALLOW_SERVER_RENEGOTIATION
|
||||
context->client_renegotiation_limit = options.client_renegotiation_limit;
|
||||
context->client_renegotiation_window = options.client_renegotiation_window;
|
||||
#endif
|
||||
/* We, as parent context, may ignore data */
|
||||
context->sc.is_low_prio = (int (*)(struct us_socket_t *))ssl_is_low_prio;
|
||||
|
||||
@@ -1503,7 +1465,7 @@ void us_internal_ssl_socket_context_free(
|
||||
sni_free(context->sni, sni_hostname_destructor);
|
||||
}
|
||||
|
||||
us_socket_context_free(0, &context->sc);
|
||||
us_internal_socket_context_free(0, &context->sc);
|
||||
}
|
||||
|
||||
struct us_listen_socket_t *us_internal_ssl_socket_context_listen(
|
||||
@@ -1714,7 +1676,7 @@ void *us_internal_connecting_ssl_socket_ext(struct us_connecting_socket_t *s) {
|
||||
}
|
||||
|
||||
int us_internal_ssl_socket_is_shut_down(struct us_internal_ssl_socket_t *s) {
|
||||
return us_socket_is_shut_down(0, &s->s) ||
|
||||
return !s->ssl || us_socket_is_shut_down(0, &s->s) ||
|
||||
SSL_get_shutdown(s->ssl) & SSL_SENT_SHUTDOWN;
|
||||
}
|
||||
|
||||
@@ -1740,15 +1702,17 @@ void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) {
|
||||
loop_ssl_data->ssl_socket = &s->s;
|
||||
|
||||
loop_ssl_data->msg_more = 0;
|
||||
|
||||
// sets SSL_SENT_SHUTDOWN no matter what (not actually true if error!)
|
||||
// sets SSL_SENT_SHUTDOWN and waits for the other side to do the same
|
||||
int ret = SSL_shutdown(s->ssl);
|
||||
if (ret == 0) {
|
||||
ret = SSL_shutdown(s->ssl);
|
||||
|
||||
if (SSL_in_init(s->ssl) || SSL_get_quiet_shutdown(s->ssl)) {
|
||||
// when SSL_in_init or quiet shutdown in BoringSSL, we call shutdown
|
||||
// directly
|
||||
us_socket_shutdown(0, &s->s);
|
||||
return;
|
||||
}
|
||||
|
||||
if (ret < 0) {
|
||||
|
||||
int err = SSL_get_error(s->ssl, ret);
|
||||
if (err == SSL_ERROR_SSL || err == SSL_ERROR_SYSCALL) {
|
||||
// clear
|
||||
@@ -2044,7 +2008,6 @@ us_socket_context_on_socket_connect_error(
|
||||
socket->ssl_write_wants_read = 0;
|
||||
socket->ssl_read_wants_write = 0;
|
||||
socket->handshake_state = HANDSHAKE_PENDING;
|
||||
socket->received_ssl_shutdown = 0;
|
||||
return socket;
|
||||
}
|
||||
|
||||
|
||||
@@ -109,6 +109,51 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
|
||||
return internal_cb->loop;
|
||||
}
|
||||
|
||||
|
||||
#if defined(LIBUS_USE_EPOLL)
|
||||
|
||||
#include <sys/syscall.h>
|
||||
static int has_epoll_pwait2 = -1;
|
||||
|
||||
#ifndef SYS_epoll_pwait2
|
||||
// It's consistent on multiple architectures
|
||||
// https://github.com/torvalds/linux/blob/9d1ddab261f3e2af7c384dc02238784ce0cf9f98/include/uapi/asm-generic/unistd.h#L795
|
||||
// https://github.com/google/gvisor/blob/master/test/syscalls/linux/epoll.cc#L48C1-L50C7
|
||||
#define SYS_epoll_pwait2 441
|
||||
#endif
|
||||
|
||||
static ssize_t sys_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout, const sigset_t *sigmask, size_t sigsetsize) {
|
||||
return syscall(SYS_epoll_pwait2, epfd, events, maxevents, timeout, sigmask, sigsetsize);
|
||||
}
|
||||
|
||||
static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout) {
|
||||
int ret;
|
||||
if (has_epoll_pwait2 != 0) {
|
||||
do {
|
||||
ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, NULL, 0);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
if (LIKELY(ret != -1 || errno != ENOSYS)) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
has_epoll_pwait2 = 0;
|
||||
}
|
||||
|
||||
int timeoutMs = -1;
|
||||
if (timeout) {
|
||||
timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
|
||||
}
|
||||
|
||||
do {
|
||||
ret = epoll_wait(epfd, events, maxevents, timeoutMs);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/* Loop */
|
||||
struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t *loop), void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop), unsigned int ext_size) {
|
||||
struct us_loop_t *loop = (struct us_loop_t *) us_calloc(1, sizeof(struct us_loop_t) + ext_size);
|
||||
@@ -139,9 +184,11 @@ void us_loop_run(struct us_loop_t *loop) {
|
||||
|
||||
/* Fetch ready polls */
|
||||
#ifdef LIBUS_USE_EPOLL
|
||||
loop->num_ready_polls = epoll_wait(loop->fd, loop->ready_polls, 1024, -1);
|
||||
loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, NULL);
|
||||
#else
|
||||
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, NULL);
|
||||
do {
|
||||
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, NULL);
|
||||
} while (IS_EINTR(loop->num_ready_polls));
|
||||
#endif
|
||||
|
||||
/* Iterate ready polls, dispatching them by type */
|
||||
@@ -183,12 +230,6 @@ void us_loop_run(struct us_loop_t *loop) {
|
||||
}
|
||||
}
|
||||
|
||||
#if defined(LIBUS_USE_EPOLL)
|
||||
|
||||
// static int has_epoll_pwait2 = 0;
|
||||
// TODO:
|
||||
|
||||
#endif
|
||||
|
||||
void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout) {
|
||||
if (loop->num_polls == 0)
|
||||
@@ -207,13 +248,12 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
|
||||
|
||||
/* Fetch ready polls */
|
||||
#ifdef LIBUS_USE_EPOLL
|
||||
int timeoutMs = -1;
|
||||
if (timeout) {
|
||||
timeoutMs = timeout->tv_sec * 1000 + timeout->tv_nsec / 1000000;
|
||||
}
|
||||
loop->num_ready_polls = epoll_wait(loop->fd, loop->ready_polls, 1024, timeoutMs);
|
||||
|
||||
loop->num_ready_polls = bun_epoll_pwait2(loop->fd, loop->ready_polls, 1024, timeout);
|
||||
#else
|
||||
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, timeout);
|
||||
do {
|
||||
loop->num_ready_polls = kevent64(loop->fd, NULL, 0, loop->ready_polls, 1024, 0, timeout);
|
||||
} while (IS_EINTR(loop->num_ready_polls));
|
||||
#endif
|
||||
|
||||
/* Iterate ready polls, dispatching them by type */
|
||||
@@ -296,7 +336,10 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
|
||||
int ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
// ret should be 0 in most cases (not guaranteed when removing async)
|
||||
|
||||
@@ -332,7 +375,10 @@ void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
struct epoll_event event;
|
||||
event.events = events;
|
||||
event.data.ptr = p;
|
||||
epoll_ctl(loop->fd, EPOLL_CTL_ADD, p->state.fd, &event);
|
||||
int ret;
|
||||
do {
|
||||
ret = epoll_ctl(loop->fd, EPOLL_CTL_ADD, p->state.fd, &event);
|
||||
} while (IS_EINTR(ret));
|
||||
#else
|
||||
kqueue_change(loop->fd, p->state.fd, 0, events, p);
|
||||
#endif
|
||||
@@ -348,7 +394,10 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
struct epoll_event event;
|
||||
event.events = events;
|
||||
event.data.ptr = p;
|
||||
epoll_ctl(loop->fd, EPOLL_CTL_MOD, p->state.fd, &event);
|
||||
int rc;
|
||||
do {
|
||||
rc = epoll_ctl(loop->fd, EPOLL_CTL_MOD, p->state.fd, &event);
|
||||
} while (IS_EINTR(rc));
|
||||
#else
|
||||
kqueue_change(loop->fd, p->state.fd, old_events, events, p);
|
||||
#endif
|
||||
@@ -362,7 +411,10 @@ void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
int new_events = 0;
|
||||
#ifdef LIBUS_USE_EPOLL
|
||||
struct epoll_event event;
|
||||
epoll_ctl(loop->fd, EPOLL_CTL_DEL, p->state.fd, &event);
|
||||
int rc;
|
||||
do {
|
||||
rc = epoll_ctl(loop->fd, EPOLL_CTL_DEL, p->state.fd, &event);
|
||||
} while (IS_EINTR(rc));
|
||||
#else
|
||||
if (old_events) {
|
||||
kqueue_change(loop->fd, p->state.fd, old_events, new_events, NULL);
|
||||
@@ -373,12 +425,14 @@ void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
us_internal_loop_update_pending_ready_polls(loop, p, 0, old_events, new_events);
|
||||
}
|
||||
|
||||
unsigned int us_internal_accept_poll_event(struct us_poll_t *p) {
|
||||
size_t us_internal_accept_poll_event(struct us_poll_t *p) {
|
||||
#ifdef LIBUS_USE_EPOLL
|
||||
int fd = us_poll_fd(p);
|
||||
uint64_t buf;
|
||||
int read_length = read(fd, &buf, 8);
|
||||
(void)read_length;
|
||||
ssize_t read_length = 0;
|
||||
do {
|
||||
read_length = read(fd, &buf, 8);
|
||||
} while (IS_EINTR(read_length));
|
||||
return buf;
|
||||
#else
|
||||
/* Kqueue has no underlying FD for timers or user events */
|
||||
@@ -467,7 +521,11 @@ void us_timer_close(struct us_timer_t *timer, int fallthrough) {
|
||||
|
||||
struct kevent64_s event;
|
||||
EV_SET64(&event, (uint64_t) (void*) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
|
||||
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
|
||||
/* (regular) sockets are the only polls which are not freed immediately */
|
||||
if(fallthrough){
|
||||
@@ -486,7 +544,11 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
|
||||
struct kevent64_s event;
|
||||
uint64_t ptr = (uint64_t)(void*)internal_cb;
|
||||
EV_SET64(&event, ptr, EVFILT_TIMER, EV_ADD | (repeat_ms ? 0 : EV_ONESHOT), 0, ms, (uint64_t)internal_cb, 0, 0);
|
||||
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -581,7 +643,11 @@ void us_internal_async_close(struct us_internal_async *a) {
|
||||
struct kevent64_s event;
|
||||
uint64_t ptr = (uint64_t)(void*)internal_cb;
|
||||
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);
|
||||
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
mach_port_deallocate(mach_task_self(), internal_cb->port);
|
||||
us_free(internal_cb->machport_buf);
|
||||
@@ -609,7 +675,10 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
|
||||
event.ext[1] = MACHPORT_BUF_LEN;
|
||||
event.udata = (uint64_t)(void*)internal_cb;
|
||||
|
||||
int ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
int ret;
|
||||
do {
|
||||
ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
|
||||
} while (IS_EINTR(ret));
|
||||
|
||||
if (UNLIKELY(ret == -1)) {
|
||||
abort();
|
||||
|
||||
@@ -125,7 +125,7 @@ int us_poll_events(struct us_poll_t *p) {
|
||||
((p->poll_type & POLL_TYPE_POLLING_OUT) ? LIBUS_SOCKET_WRITABLE : 0);
|
||||
}
|
||||
|
||||
unsigned int us_internal_accept_poll_event(struct us_poll_t *p) { return 0; }
|
||||
size_t us_internal_accept_poll_event(struct us_poll_t *p) { return 0; }
|
||||
|
||||
int us_internal_poll_type(struct us_poll_t *p) { return p->poll_type & POLL_TYPE_KIND_MASK; }
|
||||
|
||||
|
||||
@@ -14,7 +14,8 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#pragma once
|
||||
#ifndef INTERNAL_H
|
||||
#define INTERNAL_H
|
||||
|
||||
@@ -22,6 +23,10 @@
|
||||
#ifndef __cplusplus
|
||||
#define alignas(x) __declspec(align(x))
|
||||
#endif
|
||||
|
||||
#include <BaseTsd.h>
|
||||
typedef SSIZE_T ssize_t;
|
||||
|
||||
#else
|
||||
#include <stdalign.h>
|
||||
#endif
|
||||
@@ -52,6 +57,17 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop,
|
||||
#include "internal/eventing/libuv.h"
|
||||
#endif
|
||||
|
||||
#ifndef LIKELY
|
||||
#define LIKELY(cond) __builtin_expect((_Bool)(cond), 1)
|
||||
#define UNLIKELY(cond) __builtin_expect((_Bool)(cond), 0)
|
||||
#endif
|
||||
|
||||
#ifdef _WIN32
|
||||
#define IS_EINTR(rc) (rc == SOCKET_ERROR && WSAGetLastError() == WSAEINTR)
|
||||
#else
|
||||
#define IS_EINTR(rc) (rc == -1 && errno == EINTR)
|
||||
#endif
|
||||
|
||||
/* Poll type and what it polls for */
|
||||
enum {
|
||||
/* Three first bits */
|
||||
@@ -118,7 +134,7 @@ void us_internal_async_set(struct us_internal_async *a,
|
||||
void us_internal_async_wakeup(struct us_internal_async *a);
|
||||
|
||||
/* Eventing related */
|
||||
unsigned int us_internal_accept_poll_event(struct us_poll_t *p);
|
||||
size_t us_internal_accept_poll_event(struct us_poll_t *p);
|
||||
int us_internal_poll_type(struct us_poll_t *p);
|
||||
void us_internal_poll_set_type(struct us_poll_t *p, int poll_type);
|
||||
|
||||
@@ -129,11 +145,15 @@ void us_internal_free_loop_ssl_data(struct us_loop_t *loop);
|
||||
/* Socket context related */
|
||||
void us_internal_socket_context_link_socket(struct us_socket_context_t *context,
|
||||
struct us_socket_t *s);
|
||||
void us_internal_socket_context_unlink_socket(
|
||||
void us_internal_socket_context_unlink_socket(int ssl,
|
||||
struct us_socket_context_t *context, struct us_socket_t *s);
|
||||
|
||||
void us_internal_socket_after_resolve(struct us_connecting_socket_t *s);
|
||||
void us_internal_socket_after_open(struct us_socket_t *s, int error);
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
void *reason);
|
||||
|
||||
int us_internal_handle_dns_results(struct us_loop_t *loop);
|
||||
|
||||
/* Sockets are polls */
|
||||
@@ -229,12 +249,13 @@ struct us_listen_socket_t {
|
||||
/* Listen sockets are keps in their own list */
|
||||
void us_internal_socket_context_link_listen_socket(
|
||||
struct us_socket_context_t *context, struct us_listen_socket_t *s);
|
||||
void us_internal_socket_context_unlink_listen_socket(
|
||||
void us_internal_socket_context_unlink_listen_socket(int ssl,
|
||||
struct us_socket_context_t *context, struct us_listen_socket_t *s);
|
||||
|
||||
struct us_socket_context_t {
|
||||
alignas(LIBUS_EXT_ALIGNMENT) struct us_loop_t *loop;
|
||||
uint32_t global_tick;
|
||||
uint32_t ref_count;
|
||||
unsigned char timestamp;
|
||||
unsigned char long_timestamp;
|
||||
struct us_socket_t *head_sockets;
|
||||
@@ -265,7 +286,8 @@ struct us_internal_ssl_socket_t;
|
||||
typedef void (*us_internal_on_handshake_t)(
|
||||
struct us_internal_ssl_socket_t *, int success,
|
||||
struct us_bun_verify_error_t verify_error, void *custom_data);
|
||||
|
||||
|
||||
void us_internal_socket_context_free(int ssl, struct us_socket_context_t *context);
|
||||
/* SNI functions */
|
||||
void us_internal_ssl_socket_context_add_server_name(
|
||||
struct us_internal_ssl_socket_context_t *context,
|
||||
|
||||
@@ -27,6 +27,7 @@ struct us_internal_loop_data_t {
|
||||
int last_write_failed;
|
||||
struct us_socket_context_t *head;
|
||||
struct us_socket_context_t *iterator;
|
||||
struct us_socket_context_t *closed_context_head;
|
||||
char *recv_buf;
|
||||
char *send_buf;
|
||||
void *ssl_data;
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
|
||||
#ifndef BSD_H
|
||||
#define BSD_H
|
||||
#pragma once
|
||||
|
||||
// top-most wrapper of bsd-like syscalls
|
||||
|
||||
@@ -25,7 +26,7 @@
|
||||
|
||||
#include "libusockets.h"
|
||||
|
||||
#ifdef _WIN32
|
||||
#ifdef _WIN32
|
||||
#ifndef NOMINMAX
|
||||
#define NOMINMAX
|
||||
#endif
|
||||
@@ -34,7 +35,7 @@
|
||||
#pragma comment(lib, "ws2_32.lib")
|
||||
#define SETSOCKOPT_PTR_TYPE const char *
|
||||
#define LIBUS_SOCKET_ERROR INVALID_SOCKET
|
||||
#else
|
||||
#else /* POSIX */
|
||||
#ifndef _GNU_SOURCE
|
||||
#define _GNU_SOURCE
|
||||
#endif
|
||||
@@ -134,9 +135,9 @@ int bsd_addr_get_port(struct bsd_addr_t *addr);
|
||||
// called by dispatch_ready_poll
|
||||
LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd_addr_t *addr);
|
||||
|
||||
int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags);
|
||||
int bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more);
|
||||
int bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length);
|
||||
ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags);
|
||||
ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more);
|
||||
ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length);
|
||||
int bsd_would_block();
|
||||
|
||||
// return LIBUS_SOCKET_ERROR or the fd that represents listen socket
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
// clang-format off
|
||||
|
||||
#pragma once
|
||||
#ifndef us_calloc
|
||||
#define us_calloc calloc
|
||||
#endif
|
||||
@@ -49,6 +49,7 @@
|
||||
#define LIBUS_EXT_ALIGNMENT 16
|
||||
#define ALLOW_SERVER_RENEGOTIATION 0
|
||||
|
||||
#define LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN 0
|
||||
#define LIBUS_SOCKET_CLOSE_CODE_CONNECTION_RESET 1
|
||||
|
||||
/* Define what a socket descriptor is based on platform */
|
||||
@@ -229,8 +230,11 @@ struct us_socket_context_t *us_create_socket_context(int ssl, struct us_loop_t *
|
||||
struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop_t *loop,
|
||||
int ext_size, struct us_bun_socket_context_options_t options);
|
||||
|
||||
/* Delete resources allocated at creation time. */
|
||||
/* Delete resources allocated at creation time (will call unref now and only free when ref count == 0). */
|
||||
void us_socket_context_free(int ssl, struct us_socket_context_t *context);
|
||||
void us_socket_context_ref(int ssl, struct us_socket_context_t *context);
|
||||
void us_socket_context_unref(int ssl, struct us_socket_context_t *context);
|
||||
|
||||
struct us_bun_verify_error_t us_socket_verify_error(int ssl, struct us_socket_t *context);
|
||||
/* Setters of various async callbacks */
|
||||
void us_socket_context_on_open(int ssl, struct us_socket_context_t *context,
|
||||
|
||||
@@ -47,6 +47,8 @@ void us_internal_loop_data_init(struct us_loop_t *loop, void (*wakeup_cb)(struct
|
||||
loop->data.parent_ptr = 0;
|
||||
loop->data.parent_tag = 0;
|
||||
|
||||
loop->data.closed_context_head = 0;
|
||||
|
||||
loop->data.wakeup_async = us_internal_create_async(loop, 1, 0);
|
||||
us_internal_async_set(loop->data.wakeup_async, (void (*)(struct us_internal_async *)) wakeup_cb);
|
||||
}
|
||||
@@ -234,6 +236,15 @@ void us_internal_free_closed_sockets(struct us_loop_t *loop) {
|
||||
loop->data.closed_connecting_head = 0;
|
||||
}
|
||||
|
||||
void us_internal_free_closed_contexts(struct us_loop_t *loop) {
|
||||
for (struct us_socket_context_t *ctx = loop->data.closed_context_head; ctx; ) {
|
||||
struct us_socket_context_t *next = ctx->next;
|
||||
us_free(ctx);
|
||||
ctx = next;
|
||||
}
|
||||
loop->data.closed_context_head = 0;
|
||||
}
|
||||
|
||||
void sweep_timer_cb(struct us_internal_callback_t *cb) {
|
||||
us_internal_timer_sweep(cb->loop);
|
||||
}
|
||||
@@ -253,6 +264,7 @@ void us_internal_loop_pre(struct us_loop_t *loop) {
|
||||
void us_internal_loop_post(struct us_loop_t *loop) {
|
||||
us_internal_handle_dns_results(loop);
|
||||
us_internal_free_closed_sockets(loop);
|
||||
us_internal_free_closed_contexts(loop);
|
||||
loop->data.post_cb(loop);
|
||||
}
|
||||
|
||||
@@ -356,7 +368,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int events)
|
||||
s->context->loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */
|
||||
} else {
|
||||
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
|
||||
us_internal_socket_context_unlink_socket(s->context, s);
|
||||
us_internal_socket_context_unlink_socket(0, s->context, s);
|
||||
|
||||
/* Link this socket to the low-priority queue - we use a LIFO queue, to prioritize newer clients that are
|
||||
* maybe not already timeouted - sounds unfair, but works better in real-life with smaller client-timeouts
|
||||
@@ -411,7 +423,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int events)
|
||||
if (us_socket_is_shut_down(0, s)) {
|
||||
/* We got FIN back after sending it */
|
||||
/* Todo: We should give "CLEAN SHUTDOWN" as reason here */
|
||||
s = us_socket_close(0, s, 0, NULL);
|
||||
s = us_socket_close(0, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, NULL);
|
||||
} else {
|
||||
/* We got FIN, so stop polling for readable */
|
||||
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE);
|
||||
@@ -419,7 +431,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int events)
|
||||
}
|
||||
} else if (length == LIBUS_SOCKET_ERROR && !bsd_would_block()) {
|
||||
/* Todo: decide also here what kind of reason we should give */
|
||||
s = us_socket_close(0, s, 0, NULL);
|
||||
s = us_socket_close(0, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, NULL);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -137,7 +137,7 @@ void us_connecting_socket_close(int ssl, struct us_connecting_socket_t *c) {
|
||||
c->closed = 1;
|
||||
|
||||
for (struct us_socket_t *s = c->connecting_head; s; s = s->connect_next) {
|
||||
us_internal_socket_context_unlink_socket(s->context, s);
|
||||
us_internal_socket_context_unlink_socket(ssl, s->context, s);
|
||||
us_poll_stop((struct us_poll_t *) s, s->context->loop);
|
||||
bsd_close_socket(us_poll_fd((struct us_poll_t *) s));
|
||||
|
||||
@@ -157,6 +157,9 @@ void us_connecting_socket_close(int ssl, struct us_connecting_socket_t *c) {
|
||||
}
|
||||
|
||||
struct us_socket_t *us_socket_close(int ssl, struct us_socket_t *s, int code, void *reason) {
|
||||
if(ssl) {
|
||||
return (struct us_socket_t *)us_internal_ssl_socket_close((struct us_internal_ssl_socket_t *) s, code, reason);
|
||||
}
|
||||
if (!us_socket_is_closed(0, s)) {
|
||||
if (s->low_prio_state == 1) {
|
||||
/* Unlink this socket from the low-priority queue */
|
||||
@@ -169,7 +172,7 @@ struct us_socket_t *us_socket_close(int ssl, struct us_socket_t *s, int code, vo
|
||||
s->next = 0;
|
||||
s->low_prio_state = 0;
|
||||
} else {
|
||||
us_internal_socket_context_unlink_socket(s->context, s);
|
||||
us_internal_socket_context_unlink_socket(ssl, s->context, s);
|
||||
}
|
||||
#ifdef LIBUS_USE_KQUEUE
|
||||
// kqueue automatically removes the fd from the set on close
|
||||
@@ -219,7 +222,7 @@ struct us_socket_t *us_socket_detach(int ssl, struct us_socket_t *s) {
|
||||
s->next = 0;
|
||||
s->low_prio_state = 0;
|
||||
} else {
|
||||
us_internal_socket_context_unlink_socket(s->context, s);
|
||||
us_internal_socket_context_unlink_socket(ssl, s->context, s);
|
||||
}
|
||||
us_poll_stop((struct us_poll_t *) s, s->context->loop);
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user