Mirror of https://github.com/oven-sh/bun, synced 2026-02-17 22:32:06 +00:00

Compare commits: 311 commits, bun-v0.0.6 ... bun-v0.0.6
@@ -1,7 +1,7 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
{
"name": "Bun (Ubuntu)",
"name": "bun (Ubuntu)",

// Sets the run context to one level up instead of the .devcontainer folder.
"context": "..",
@@ -14,10 +14,7 @@
"terminal.integrated.shell.linux": "/bin/zsh",
"zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
"zig.zigPath": "/build/zig/zig",
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[zig]": {
"editor.defaultFormatter": "tiehuis.zig"
}
"editor.defaultFormatter": "esbenp.prettier-vscode"
},

// Add the IDs of extensions you want installed when the container is created.

@@ -1,6 +1,6 @@
#!/bin/bash

echo "To get started, login to GitHub and clone Bun's GitHub repo into /workspaces/bun"
echo "To get started, login to GitHub and clone bun's GitHub repo into /workspaces/bun"
echo "Make sure to login with a Personal Access Token"
echo "# First time setup"
echo "gh auth login"
@@ -9,7 +9,7 @@ echo ""
echo "# Compile bun dependencies (zig is already compiled)"
echo "make devcontainer"
echo ""
echo "# Build Bun for development"
echo "# Build bun for development"
echo "make dev"
echo ""
echo "# Run bun"

@@ -3,6 +3,5 @@
curl -L https://github.com/Jarred-Sumner/vscode-zig/releases/download/fork-v1/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/zigtools/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git checkout e472fca3be6335f16032b48e40ca0d5ffda6ab0a
git submodule update --init --recursive --progress --depth=1
zig build -Drelease-fast

@@ -2,7 +2,7 @@
"folders": [
{
// Source code
"name": "Bun",
"name": "bun",
"path": "bun"
},
]

.docker/build-base-images.sh (new file, 15 lines)
@@ -0,0 +1,15 @@
#!/bin/bash

set -euxo pipefail

export DOCKER_BUILDKIT=1

docker login --username bunbunbunbun

docker build -f Dockerfile.base -t bunbunbunbun/bun-test-base --target bun-test-base . --platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH
docker build -f Dockerfile.base -t bunbunbunbun/bun-base-with-zig-and-webkit --target bun-base-with-zig-and-webkit . --platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH
docker build -f Dockerfile.base -t bunbunbunbun/bun-base --target bun-base --platform=linux/$BUILDARCH . --build-arg BUILDARCH=$BUILDARCH

docker push bunbunbunbun/bun-test-base:latest
docker push bunbunbunbun/bun-base-with-zig-and-webkit:latest
docker push bunbunbunbun/bun-base:latest
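The base-image script above expects `BUILDARCH` to already be set in the environment and an authenticated Docker Hub session; it does not provide defaults itself. A minimal sketch of running it (the `amd64` value is an assumption, not part of the script):

```bash
# Hypothetical invocation of .docker/build-base-images.sh.
# BUILDARCH must be exported beforehand (e.g. amd64 or arm64);
# `docker login` inside the script will prompt for the password.
export BUILDARCH=amd64
bash .docker/build-base-images.sh
```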
.github/workflows/bun.yml (62 changed lines, vendored)
@@ -22,8 +22,9 @@ env:
TEST_TAG: bun-test'

jobs:
tests:
e2e:
runs-on: self-hosted
name: "Integration tests"
steps:
- name: Checkout
uses: actions/checkout@v2
@@ -38,16 +39,6 @@ jobs:
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Pull Base Image
run: bash .docker/pull.sh
- name: Build Release Image
uses: docker/build-push-action@v2
with:
context: .
target: release
tags: bun:${{github.sha}}
builder: ${{ steps.buildx.outputs.name }}
cache-from: type=gha
cache-to: type=gha,mode=max
load: true
- name: Build tests
uses: docker/build-push-action@v2
with:
@@ -88,10 +79,49 @@ jobs:
BUN_TEST_NAME: test-bun-run
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
run: bash .docker/runner.sh

- name: Run test-bun-install
env:
RUNNER_TEMP: ${RUNNER_TEMP}
BUN_TEST_NAME: test-bun-install
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
run: bash .docker/runner.sh
# This is commented out because zig test does not work on the CI
# Which sucks
# zig-unit-tests:
# runs-on: self-hosted
# name: "Unit tests (Zig)"
# steps:
# - name: Checkout
# uses: actions/checkout@v2
# - name: Checkout submodules
# run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@v1
# - name: Login to Dockerhub
# uses: docker/login-action@v1
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_PASSWORD }}
# - name: Pull Base Image
# run: bash .docker/pull.sh
# - name: Build tests
# uses: docker/build-push-action@v2
# with:
# context: .
# target: build_unit
# tags: bun-unit-tests:latest
# load: true
# cache-from: type=gha
# cache-to: type=gha,mode=max
# builder: ${{ steps.buildx.outputs.name }}
# - name: Run tests
# env:
# GITHUB_WORKSPACE: $GITHUB_WORKSPACE
# RUNNER_TEMP: ${RUNNER_TEMP}
# run: bash .docker/unit-tests.sh
release:
runs-on: self-hosted
needs: tests
needs: ["e2e"]
if: github.ref == 'refs/heads/main'
steps:
- name: Checkout
@@ -128,9 +158,9 @@ jobs:
platforms: |
linux/amd64
labels: |
org.opencontainers.image.title=Bun
org.opencontainers.image.description=Bun is a fast bundler, transpiler, JavaScript Runtime environment and package manager for web software. The image is an Ubuntu 20.04 image with bun preinstalled into /opt/bun.
org.opencontainers.image.vendor=Bun
org.opencontainers.image.title=bun
org.opencontainers.image.description=bun is a fast bundler, transpiler, JavaScript Runtime environment and package manager for web software. The image is an Ubuntu 20.04 image with bun preinstalled into /opt/bun.
org.opencontainers.image.vendor=bun
org.opencontainers.image.source=https://github.com/Jarred-Sumner/bun
org.opencontainers.image.url=https://bun.sh
builder: ${{ steps.buildx.outputs.name }}
.gitignore (6 changed lines, vendored)
@@ -12,6 +12,7 @@ yarn.lock
dist
*.log
*.out.js
*.out.refresh.js
/package-lock.json
build
*.wat
@@ -83,3 +84,8 @@ src/deps/s2n-tls
.npm.gz

bun-binary

src/deps/PLCrashReporter/

*.dSYM
*.crash
.gitmodules (8 changed lines, vendored)
@@ -11,7 +11,7 @@
ignore = dirty
[submodule "src/deps/mimalloc"]
path = src/deps/mimalloc
url = https://github.com/microsoft/mimalloc.git
url = https://github.com/Jarred-Sumner/mimalloc.git
ignore = dirty
[submodule "src/deps/zlib"]
path = src/deps/zlib
@@ -24,4 +24,8 @@
[submodule "src/deps/boringssl"]
path = src/deps/boringssl
url = https://github.com/google/boringssl.git
ignore = dirty
ignore = dirty
[submodule "src/deps/libbacktrace"]
path = src/deps/libbacktrace
url = https://github.com/ianlancetaylor/libbacktrace
ignore = dirty
.scripts/write-versions.sh (new file, 23 lines)
@@ -0,0 +1,23 @@
#!/bin/bash
set -euxo pipefail

WEBKIT_VERSION=$(git rev-parse HEAD:./src/javascript/jsc/WebKit)
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)

rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig

zig fmt src/generated_versions_list.zig
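This new script pins each vendored dependency to the submodule commit that is currently checked out and writes the result into a Zig source file. A quick way to regenerate the manifest and spot-check what was written (the output shown in the comments is an illustrative placeholder, not real data):

```bash
# Regenerate the version manifest from the current submodule state,
# then show the first few generated constants.
bash .scripts/write-versions.sh
head -n 6 src/generated_versions_list.zig
# Expected shape of the output (placeholder SHAs):
#   // AUTO-GENERATED FILE. Created via .scripts/write-versions.sh
#   pub const webkit = "<submodule commit sha>";
#   pub const mimalloc = "<submodule commit sha>";
```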
.vscode/launch.json (107 changed lines, generated, vendored)
@@ -1,13 +1,12 @@
{
"version": "0.2.0",
"configurations": [

{
"type": "lldb",
"request": "launch",
"name": "HTTP bench",
"program": "${workspaceFolder}/misctools/http_bench",
"args": ["https://example.com", "--count=80"],
"args": ["https://twitter.com", "--count=100"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
@@ -16,7 +15,34 @@
"request": "launch",
"name": "fetch debug",
"program": "${workspaceFolder}/misctools/fetch",
"args": ["https://lodash.com", "--verbose"],
"args": ["https://example.com", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "fetch debug #2",
"program": "${workspaceFolder}/misctools/fetch",
"args": ["https://twitter.com", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "fetch debug #13w",
"program": "${workspaceFolder}/misctools/fetch",
"args": ["http://127.0.0.1:8080/next.json", "--quiet", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "fetch debug #12w",
"program": "${workspaceFolder}/misctools/fetch",
"args": ["https://registry.npmjs.org/next", "--quiet", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
@@ -61,8 +87,17 @@
"request": "launch",
"name": "bun create debug",
"program": "bun-debug",
"args": ["create", "hanford/trends", "foo"],
"cwd": "/tmp/",
"args": ["create", "next", "foo", "--open", "--force"],
"cwd": "/tmp",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun dev debug",
"program": "bun-debug",
"args": ["dev"],
"cwd": "/tmp/foo",
"console": "internalConsole"
},
{
@@ -70,8 +105,9 @@
"request": "launch",
"name": "bun run debug",
"program": "bun-debug",
"args": ["paoskdpoasdk"],
"cwd": "/tmp/",
"args": ["run", "/tmp/bar.js"],
// "args": ["--version"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
@@ -231,17 +267,49 @@
"request": "launch",
"name": "Dazzle serve",
"program": "bun-debug",
"args": ["--origin=http://localhost:5001", "--disable-bun.js", "--disable-hmr"],
"args": [
"--origin=http://localhost:5001",
"--disable-bun.js",
"--disable-hmr"
],
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "Bun",
"name": "Dazzle bun",
"program": "bun-debug",
"args": ["bun", "--use=next"],
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
"console": "internalConsole",
"env": { "GOMAXPROCS": "1" }
},
{
"type": "lldb",
"request": "launch",
"name": "bun run",
"program": "bun-debug",
"args": ["cat.js", "./node_modules/@babel/standalone/babel.js"],
"cwd": "/Users/jarred/Build/foobar",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run callback bench",
"program": "bun-debug",
"args": ["/Users/jarred/Code/bun/bench/snippets/callbacks-overhead.mjs"],
"cwd": "/Users/jarred/Build/foobar",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test",
"program": "bun-debug",
"args": ["wiptest", "import-meta"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
@@ -327,7 +395,7 @@
{
"type": "lldb",
"request": "launch",
"name": "Context Bun Bug",
"name": "Context bun Bug",
"program": "bun-debug",
"args": ["bun", "./code.js"],
"cwd": "/Users/jarred/Build/context/www",
@@ -336,7 +404,7 @@
{
"type": "lldb",
"request": "launch",
"name": "Context Bun",
"name": "Context bun",
"program": "bun-debug",
"args": ["bun", "--use=next"],
"cwd": "/Users/jarred/Build/context/www",
@@ -345,7 +413,7 @@
{
"type": "lldb",
"request": "launch",
"name": "Bun-hello",
"name": "bun-hello",
"program": "bun-debug",
"args": [],
"cwd": "${workspaceFolder}/packages/bun-hello",
@@ -367,9 +435,7 @@
"program": "bun-debug",
"args": ["install", "--backend=clonefile", "--force"],
"cwd": "/Users/jarred/Build/octokit-test",
"env": {

},
"env": {},
"console": "internalConsole"
},
{
@@ -379,8 +445,7 @@
"program": "bun-debug",
"args": ["add", "typescript"],
"cwd": "/tmp/wow-such-npm",
"env": {
},
"env": {},
"console": "internalConsole"
},
{
@@ -390,8 +455,7 @@
"program": "bun-debug",
"args": ["add", "react"],
"cwd": "/tmp/wow-such-npm",
"env": {
},
"env": {},
"console": "internalConsole"
},
{
@@ -401,8 +465,7 @@
"program": "bun-debug",
"args": ["remove", "foo"],
"cwd": "/Users/jarred/Build/athena.yarn",
"env": {
},
"env": {},
"console": "internalConsole"
},
{
@@ -414,7 +477,6 @@
"cwd": "/tmp/wow-such-npm",
"env": {
"BUN_CONFIG_SKIP_SAVE_LOCKFILE": "1"

},
"console": "internalConsole"
},
@@ -530,7 +592,6 @@
"cwd": "${workspaceFolder}/src/test/fixtures",
"console": "internalConsole"
},

// {
// "type": "lldb",
.vscode/settings.json (8 changed lines, vendored)
@@ -8,7 +8,13 @@
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
"[zig]": {
"editor.defaultFormatter": "tiehuis.zig"
"editor.defaultFormatter": "AugusteRame.zls-vscode"
},
"[ts]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[tsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
Dockerfile (60 changed lines)
@@ -53,6 +53,25 @@ WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make libarchive && rm -rf src/deps/libarchive Makefile

FROM bunbunbunbun/bun-base:latest as libbacktrace

ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libbacktrace ${BUN_DIR}/src/deps/libbacktrace

WORKDIR $BUN_DIR

RUN cd $BUN_DIR && \
make libbacktrace && rm -rf src/deps/libbacktrace Makefile

FROM bunbunbunbun/bun-base:latest as boringssl

ARG DEBIAN_FRONTEND=noninteractive
@@ -159,8 +178,10 @@ COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out

WORKDIR ${BUN_DIR}

@@ -176,6 +197,8 @@ ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun

COPY Makefile ${BUN_DIR}/Makefile

WORKDIR $BUN_DIR

RUN cd $BUN_DIR && rm -rf $HOME/.cache zig-cache && make \
@@ -186,7 +209,7 @@ RUN cd $BUN_DIR && rm -rf $HOME/.cache zig-cache && make \
fallback_decoder && rm -rf $HOME/.cache zig-cache && \
mkdir -p $BUN_RELEASE_DIR && \
make release copy-to-bun-release-dir && \
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig $(BUN_DIR)/packages

FROM prepare_release as build_unit

@@ -201,11 +224,9 @@ ARG BUN_DIR=${GITHUB_WORKSPACE}/bun

WORKDIR $BUN_DIR

ENTRYPOINT [ "/bin/bash" ]
ENV PATH "$ZIG_PATH:$PATH"

CMD cd $BUN_DIR && \
make \
jsc-bindings-headers \
CMD make jsc-bindings-headers \
api \
analytics \
bun_error \
@@ -236,7 +257,7 @@ ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
ENV BUN_INSTALL /home/ubuntu/.bun
ENV XDG_CONFIG_HOME /home/ubuntu/.config

RUN update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-12 90
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-13 90

COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
@@ -250,6 +271,31 @@ RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
bash /scripts/zig-env.sh
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json

FROM ubuntu:20.04 as release_with_debug_info

ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun

COPY .devcontainer/limits.conf /etc/security/limits.conf

ENV BUN_INSTALL /opt/bun
ENV PATH "/opt/bun/bin:$PATH"
ARG BUILDARCH=amd64
LABEL org.opencontainers.image.title="bun ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /opt/bun/bin/bun
COPY --from=build_release ${BUN_RELEASE_DIR}/bun-profile /opt/bun/bin/bun-profile

WORKDIR /opt/bun

ENTRYPOINT [ "/opt/bun/bin/bun" ]

FROM ubuntu:20.04 as release

ARG DEBIAN_FRONTEND=noninteractive
@@ -266,7 +312,7 @@ COPY .devcontainer/limits.conf /etc/security/limits.conf
ENV BUN_INSTALL /opt/bun
ENV PATH "/opt/bun/bin:$PATH"
ARG BUILDARCH=amd64
LABEL org.opencontainers.image.title="Bun ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.title="bun ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /opt/bun/bin/bun
WORKDIR /opt/bun

@@ -18,7 +18,7 @@ RUN apt-get update && \
add-apt-repository ppa:longsleep/golang-backports && \
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
chmod +x llvm.sh && \
./llvm.sh 12 && \
./llvm.sh 13 && \
apt-get update && \
apt-get install --no-install-recommends -y \
ca-certificates \
@@ -30,29 +30,29 @@ RUN apt-get update && \
git \
libssl-dev \
ruby \
liblld-12-dev \
libclang-12-dev \
liblld-13-dev \
libclang-13-dev \
nodejs \
gcc \
g++ \
npm \
clang-12 \
clang-format-12 \
libc++-12-dev \
libc++abi-12-dev \
lld-12 \
clang-13 \
clang-format-13 \
libc++-13-dev \
libc++abi-13-dev \
lld-13 \
libicu-dev \
wget \
unzip \
tar \
golang-go ninja-build pkg-config automake autoconf libtool curl && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-12 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-12 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-12 90 && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-13 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-13 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-13 90 && \
npm install -g esbuild

ENV CC=clang-12
ENV CXX=clang++-12
ENV CC=clang-13
ENV CXX=clang++-13

ARG BUILDARCH=amd64
@@ -69,7 +69,7 @@ ENV BUN_DEPS_OUT_DIR ${BUN_DEPS_OUT_DIR}

RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}

LABEL org.opencontainers.image.title="Bun base image ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.title="bun base image ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun

@@ -88,12 +88,12 @@ ARG BUILDARCH=amd64
WORKDIR $GITHUB_WORKSPACE

RUN cd $GITHUB_WORKSPACE && \
curl -o zig-linux-$BUILDARCH.zip -L https://github.com/Jarred-Sumner/zig/releases/download/dec20/zig-linux-$BUILDARCH.zip && \
curl -o zig-linux-$BUILDARCH.zip -L https://github.com/Jarred-Sumner/zig/releases/download/jan17/zig-linux-$BUILDARCH.zip && \
unzip -q zig-linux-$BUILDARCH.zip && \
rm zig-linux-$BUILDARCH.zip;

RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/Jarred-Sumner/WebKit/releases/download/Bun-v0/bun-webkit-linux-$BUILDARCH.tar.gz && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/Jarred-Sumner/WebKit/releases/download/Bun-v0-llvm13/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
@@ -108,7 +108,7 @@ RUN cd $GITHUB_WORKSPACE && \

ENV ZIG "${ZIG_PATH}/zig"

LABEL org.opencontainers.image.title="Bun base image with zig & webkit ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun

@@ -119,7 +119,7 @@
# ENV BUN_INSTALL /opt/bun
# ENV PATH /opt/bun/bin:$PATH

# LABEL org.opencontainers.image.title="Bun - Linux ${BUILDARCH} (musl)"
# LABEL org.opencontainers.image.title="bun - Linux ${BUILDARCH} (musl)"
# LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun

# FROM release as test
Makefile (247 changed lines)
@@ -11,6 +11,7 @@ endif

MIN_MACOS_VERSION = 10.14

MARCH_NATIVE =

ARCH_NAME :=
@@ -40,11 +41,11 @@ PACKAGE_JSON_VERSION = 0.0.$(BUILD_ID)
BUN_BUILD_TAG = bun-v$(PACKAGE_JSON_VERSION)
BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
PRETTIER ?= $(shell which prettier || echo "./node_modules/.bin/prettier")

DSYMUTIL ?= $(shell which dsymutil || which dsymutil-13)
WEBKIT_DIR ?= $(realpath src/javascript/jsc/WebKit)
WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release

NPM_CLIENT ?= $(shell which npm)
NPM_CLIENT ?= $(shell which bun || which npm)
ZIG ?= $(shell which zig || echo -e "error: Missing zig. Please make sure zig is in PATH. Or set ZIG=/path/to-zig-executable")

# We must use the same compiler version for the JavaScriptCore bindings and JavaScriptCore
@@ -52,8 +53,17 @@ ZIG ?= $(shell which zig || echo -e "error: Missing zig. Please make sure zig is
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
CC = $(shell which clang-12 || which clang)
CXX = $(shell which clang++-12 || which clang++)
CC = $(shell which clang-13 || which clang)
CXX = $(shell which clang++-13 || which clang++)

ifeq ($(OS_NAME),darwin)
LLVM_PREFIX = $(shell brew --prefix llvm)
LDFLAGS += " -L$(LLVM_PREFIX)/lib"
CPPFLAGS += " -I$(LLVM_PREFIX)/include"
CC = $(LLVM_PREFIX)/bin/clang
CXX = $(LLVM_PREFIX)/bin/clang++
CODESIGN_IDENTITY ?= $(shell security find-identity -v -p codesigning | awk '/Apple Development/ { print $$2 }')
endif

# macOS sed is different
SED = $(shell which gsed || which sed)
@@ -73,7 +83,7 @@ OPENSSL_LINUX_DIR = $(BUN_DEPS_DIR)/openssl/openssl-OpenSSL_1_1_1l

CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) -DCMAKE_CXX_COMPILER=$(CXX) -DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION)
CMAKE_FLAGS = $(CMAKE_FLAGS_WITHOUT_RELEASE) -DCMAKE_BUILD_TYPE=Release
CFLAGS = $(MACOS_MIN_FLAG)

LIBTOOL=libtoolize
ifeq ($(OS_NAME),darwin)
@@ -84,6 +94,8 @@ ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
endif

CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) -ffunction-sections -fdata-sections -g -O3
BUN_TMP_DIR := /tmp/make-bun
BUN_DEPLOY_DIR = /tmp/bun-v$(PACKAGE_JSON_VERSION)/$(PACKAGE_NAME)

@@ -111,10 +123,12 @@ ZLIB_LIB_DIR ?= $(BUN_DEPS_DIR)/zlib

JSC_FILES := $(JSC_LIB)/libJavaScriptCore.a $(JSC_LIB)/libWTF.a $(JSC_LIB)/libbmalloc.a

ENABLE_MIMALLOC ?= 1

# https://github.com/microsoft/mimalloc/issues/512
# Linking mimalloc via object file on macOS x64 can cause heap corruption
MIMALLOC_FILE = libmimalloc.o
MIMALLOC_INPUT_PATH = CMakeFiles/mimalloc-obj.dir/src/static.c.o
_MIMALLOC_FILE = libmimalloc.o
_MIMALLOC_INPUT_PATH = CMakeFiles/mimalloc-obj.dir/src/static.c.o

DEFAULT_LINKER_FLAGS =

@@ -125,17 +139,29 @@ DEFAULT_LINKER_FLAGS= -pthread -ldl
endif
ifeq ($(OS_NAME),darwin)
JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
MIMALLOC_FILE = libmimalloc.a
MIMALLOC_INPUT_PATH = libmimalloc.a
_MIMALLOC_FILE = libmimalloc.a
_MIMALLOC_INPUT_PATH = libmimalloc.a
endif

MIMALLOC_FILE=
MIMALLOC_INPUT_PATH=
MIMALLOC_FILE_PATH=
ifeq ($(ENABLE_MIMALLOC), 1)
MIMALLOC_FILE=$(_MIMALLOC_FILE)
MIMALLOC_FILE_PATH=$(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
MIMALLOC_INPUT_PATH=$(_MIMALLOC_INPUT_PATH)
endif

MACOSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION)
MACOS_MIN_FLAG=

POSIX_PKG_MANAGER=sudo apt

STRIP ?= $(shell which llvm-strip || which llvm-strip-12 || echo "Missing llvm-strip. Please pass it in the STRIP environment var"; exit 1;)
STRIP ?= $(shell which llvm-strip || which llvm-strip-13 || echo "Missing llvm-strip. Please pass it in the STRIP environment var"; exit 1;)

HOMEBREW_PREFIX ?= $(BREW_PREFIX_PATH)

@@ -207,25 +233,35 @@ CLANG_FLAGS = $(INCLUDE_DIRS) \
-DENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0 \
-DBUILDING_JSCONLY__ \
-DASSERT_ENABLED=0 \
-fPIE
-fvisibility=hidden \
-fvisibility-inlines-hidden \
-fno-omit-frame-pointer $(CFLAGS)

# This flag is only added to webkit builds on Apple platforms
# It has something to do with ICU
ifeq ($(OS_NAME), darwin)
CLANG_FLAGS += -DDU_DISABLE_RENAMING=1 \
$(MACOS_MIN_FLAG) -lstdc++
-lstdc++ \
-ffunction-sections \
-fdata-sections \
-Wl,-no_eh_labels \
-Wl,-dead_strip \
-Wl,-dead_strip_dylibs \
-force_flat_namespace
endif

ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE) \
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MIMALLOC_FILE_PATH) \
$(BUN_DEPS_OUT_DIR)/libz.a \
$(BUN_DEPS_OUT_DIR)/libarchive.a \
$(BUN_DEPS_OUT_DIR)/libssl.a \
$(BUN_DEPS_OUT_DIR)/picohttpparser.o \
$(BUN_DEPS_OUT_DIR)/libbacktrace.a

ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO) $(BUN_DEPS_OUT_DIR)/libcrypto.boring.a

PLATFORM_LINKER_FLAGS =

STATIC_MUSL_FLAG ?=
@@ -242,9 +278,15 @@ PLATFORM_LINKER_FLAGS = \
-fdata-sections \
-static-libstdc++ \
-static-libgcc \
-Wl,--compress-debug-sections,zlib \
${STATIC_MUSL_FLAG}
endif

ifeq ($(OS_NAME), darwin)
PLATFORM_LINKER_FLAGS = \
-Wl,-keep_private_externs
endif

BUN_LLD_FLAGS = $(OBJ_FILES) \
${ICU_FLAGS} \
@@ -255,27 +297,39 @@ BUN_LLD_FLAGS = $(OBJ_FILES) \
$(DEFAULT_LINKER_FLAGS) \
$(PLATFORM_LINKER_FLAGS)

bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
CLANG_VERSION = $(shell $(CC) --version | awk '/version/ {for(i=1; i<=NF; i++){if($$i=="version"){split($$(i+1),v,".");print v[1]}}}')

vendor-without-check: api analytics node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive
bun:

vendor-without-check: api analytics node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive libbacktrace

boringssl-build:
cd $(BUN_DEPS_DIR)/boringssl && mkdir -p build && cd build && cmake $(CMAKE_FLAGS) -GNinja .. && ninja
cd $(BUN_DEPS_DIR)/boringssl && mkdir -p build && cd build && CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS) -GNinja .. && ninja

boringssl-build-debug:
cd $(BUN_DEPS_DIR)/boringssl && mkdir -p build && cd build && CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) -GNinja .. && ninja

boringssl-copy:
cp $(BUN_DEPS_DIR)/boringssl/build/ssl/libssl.a $(BUN_DEPS_OUT_DIR)/libssl.a
cp $(BUN_DEPS_DIR)/boringssl/build/crypto/libcrypto.a $(BUN_DEPS_OUT_DIR)/libcrypto.boring.a

boringssl: boringssl-build boringssl-copy
boringssl-debug: boringssl-build-debug boringssl-copy

libbacktrace:
cd $(BUN_DEPS_DIR)/libbacktrace && \
CFLAGS="$(CFLAGS)" CC=$(CC) ./configure --disable-shared --enable-static --with-pic && \
make -j$(CPUS) && \
cp ./.libs/libbacktrace.a $(BUN_DEPS_OUT_DIR)/libbacktrace.a

libarchive:
cd $(BUN_DEPS_DIR)/libarchive; \
(make clean || echo ""); \
(./build/clean.sh || echo ""); \
./build/autogen.sh; \
CFLAGS=$(CFLAGS) CC=$(CC) ./configure --disable-shared --enable-static --with-pic --disable-bsdtar --disable-bsdcat --disable-rpath --enable-posix-regex-lib --without-xml2 --without-expat --without-openssl --without-iconv --without-zlib; \
CFLAGS="$(CFLAGS)" CC=$(CC) ./configure --disable-shared --enable-static --with-pic --disable-bsdtar --disable-bsdcat --disable-rpath --enable-posix-regex-lib --without-xml2 --without-expat --without-openssl --without-iconv --without-zlib; \
make -j${CPUS}; \
cp ./.libs/libarchive.a $(BUN_DEPS_OUT_DIR)/libarchive.a;

@@ -292,7 +346,7 @@ tgz-debug:
vendor: require init-submodules vendor-without-check

zlib:
cd $(BUN_DEPS_DIR)/zlib; cmake $(CMAKE_FLAGS) .; make CFLAGS=$(CFLAGS);
cd $(BUN_DEPS_DIR)/zlib; CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS) .; CFLAGS="$(CFLAGS)" make;
cp $(BUN_DEPS_DIR)/zlib/libz.a $(BUN_DEPS_OUT_DIR)/libz.a

docker-login:
@@ -314,6 +368,7 @@ docker-push-base:

require:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "13" ]; then echo -e "ERROR: clang version >=13 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@13"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@esbuild --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@npm --version >/dev/null 2>&1 || (echo -e "ERROR: npm is required."; exit 1)
@@ -328,6 +383,9 @@ init-submodules:
build-obj:
$(ZIG) build obj -Drelease-fast

build-obj-safe:
$(ZIG) build obj -Drelease-safe

sign-macos-x64:
gon sign.macos-x64.json

@@ -337,7 +395,8 @@ sign-macos-aarch64:
cls:
@echo "\n\n---\n\n"

release: all-js jsc-bindings-mac build-obj cls bun-link-lld-release release-bin-entitlements
release: all-js jsc-bindings-mac build-obj cls bun-link-lld-release bun-link-lld-release-dsym release-bin-entitlements
release-safe: all-js jsc-bindings-mac build-obj-safe cls bun-link-lld-release bun-link-lld-release-dsym release-bin-entitlements

jsc-check:
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/javascript/jsc/WebKit -f $(shell pwd)/src/javascript/jsc/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@@ -362,6 +421,7 @@ fallback_decoder:

runtime_js:
@NODE_ENV=production esbuild --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
@NODE_ENV=production esbuild --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js

runtime_js_dev:
@NODE_ENV=development esbuild --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
@@ -386,7 +446,6 @@ fetch-debug:
httpbench-debug:
$(ZIG) build httpbench-obj
$(CXX) $(DEBUG_PACKAGE_DIR)/httpbench.o -g -o ./misctools/http_bench $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES)
rm -rf $(DEBUG_PACKAGE_DIR)/httpbench.o

httpbench-release:
@@ -394,8 +453,6 @@ httpbench-release:
$(CXX) $(PACKAGE_DIR)/httpbench.o -g -O3 -o ./misctools/http_bench $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/httpbench.o

bun-codesign-debug:
bun-codesign-release-local:

@@ -417,6 +474,10 @@ bun-codesign-release-local:

endif

bun-codesign-debug:
bun-codesign-release-local:

jsc: jsc-build jsc-copy-headers jsc-bindings
jsc-build: $(JSC_BUILD_STEPS)
jsc-bindings: jsc-bindings-headers jsc-bindings-mac
@@ -444,6 +505,8 @@ jsc-bindings-headers:
$(SED) -i '/pub const max_align_t/{N;N;N;d;}' src/javascript/jsc/bindings/headers.zig
$(SED) -i '/pub const ZigErrorCode/d' src/javascript/jsc/bindings/headers.zig
$(SED) -i '/pub const JSClassRef/d' src/javascript/jsc/bindings/headers.zig
cat src/javascript/jsc/bindings/headers.zig > /tmp/headers.zig
cat src/javascript/jsc/bindings/headers-replacements.zig /tmp/headers.zig > src/javascript/jsc/bindings/headers.zig
$(ZIG) fmt src/javascript/jsc/bindings/headers.zig

@@ -467,8 +530,8 @@ prepare-release: tag release-create
release-create-auto-updater:

release-create:
gh release create --title "Bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)"
gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "Bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/Jarred-Sumner/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install Bun. Join Bun's Discord to get access https://bun.sh/discord"
gh release create --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)"
gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/Jarred-Sumner/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install bun. Join bun's Discord to get access https://bun.sh/discord"

release-bin-entitlements:

@@ -505,14 +568,27 @@ endif

BUN_DEPLOY_ZIP = /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET).zip
BUN_DEPLOY_DSYM = /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET).dSYM.tar.gz

ifeq ($(OS_NAME),darwin)

release-bin-generate-copy-dsym:
cd $(shell dirname $(BUN_RELEASE_BIN)) && tar -czvf $(shell basename $(BUN_DEPLOY_DSYM)) $(shell basename $(BUN_RELEASE_BIN)).dSYM && \
mv $(shell basename $(BUN_DEPLOY_DSYM)) $(BUN_DEPLOY_DSYM)

endif

ifeq ($(OS_NAME),linux)
release-bin-generate-copy-dsym:
endif

release-bin-generate-copy:
rm -rf /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET) $(BUN_DEPLOY_ZIP)
mkdir -p /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET)
cp $(BUN_RELEASE_BIN) /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET)/bun

release-bin-generate: release-bin-generate-copy release-bin-generate-zip
release-bin-generate: release-bin-generate-copy release-bin-generate-zip release-bin-generate-copy-dsym

release-bin-check-version:
@@ -527,17 +603,30 @@ release-bin-check: release-bin-check-version
@make -B check-glibc-version-dependency
endif

release-bin-push-bin:
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_ZIP)
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_ZIP) --repo $(BUN_AUTO_UPDATER_REPO)

ifeq ($(OS_NAME),darwin)
release-bin-push-dsym:
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_DSYM)
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_DSYM) --repo $(BUN_AUTO_UPDATER_REPO)
endif

ifeq ($(OS_NAME),linux)
release-bin-push-dsym:
endif

release-bin-push: release-bin-push-bin release-bin-push-dsym
release-bin-without-push: test-all release-bin-check release-bin-generate release-bin-codesign
release-bin: release-bin-without-push release-bin-push

release-bin-dir:
echo $(PACKAGE_DIR)

release-bin-push:
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_ZIP)
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_ZIP) --repo $(BUN_AUTO_UPDATER_REPO)

dev-obj:
$(ZIG) build obj

@@ -552,7 +641,15 @@ mkdir-dev:
test-install:
cd integration/scripts && $(NPM_CLIENT) install

test-all: test-install test-with-hmr test-no-hmr test-create-next test-create-react test-bun-run
test-bun-dev:
BUN_BIN=$(RELEASE_BUN) bash integration/apps/bun-dev.sh
BUN_BIN=$(RELEASE_BUN) bash integration/apps/bun-dev-index-html.sh

test-dev-bun-dev:
BUN_BIN=$(DEBUG_BUN) bash integration/apps/bun-dev.sh
BUN_BIN=$(DEBUG_BUN) bash integration/apps/bun-dev-index-html.sh

test-all: test-install test-with-hmr test-no-hmr test-create-next test-create-react test-bun-run test-bun-install test-bun-dev

copy-test-node-modules:
rm -rf integration/snippets/package-json-exports/node_modules || echo "";
@@ -572,6 +669,12 @@ test-create-next:
test-bun-run:
cd integration/apps && BUN_BIN=$(RELEASE_BUN) bash ./bun-run-check.sh

test-bun-install:
cd integration/apps && JS_RUNTIME=$(RELEASE_BUN) NPM_CLIENT=$(RELEASE_BUN) bash ./bun-install.sh

test-dev-bun-install:
cd integration/apps && JS_RUNTIME=$(DEBUG_BUN) NPM_CLIENT=$(DEBUG_BUN) bash ./bun-install.sh

test-create-react:
BUN_BIN=$(RELEASE_BUN) bash integration/apps/bun-create-react.sh

@@ -593,7 +696,7 @@ test-dev-no-hmr: copy-test-node-modules
test-dev-bun-run:
cd integration/apps && BUN_BIN=$(DEBUG_BUN) bash bun-run-check.sh

test-dev-all: test-dev-with-hmr test-dev-no-hmr test-dev-create-next test-dev-create-react test-dev-bun-run
test-dev-all: test-dev-with-hmr test-dev-no-hmr test-dev-create-next test-dev-create-react test-dev-bun-run test-dev-bun-install test-dev-bun-dev
test-dev-bunjs:

test-dev: test-dev-with-hmr
@@ -601,8 +704,35 @@ test-dev: test-dev-with-hmr
jsc-copy-headers:
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;

# This is a workaround for a JSC bug that impacts aarch64
# on macOS, it never requests JIT permissions
jsc-force-fastjit:
$(SED) -i "s/USE(PTHREAD_JIT_PERMISSIONS_API)/CPU(ARM64)/g" $(WEBKIT_DIR)/Source/JavaScriptCore/jit/ExecutableAllocator.h
$(SED) -i "s/USE(PTHREAD_JIT_PERMISSIONS_API)/CPU(ARM64)/g" $(WEBKIT_DIR)/Source/JavaScriptCore/assembler/FastJITPermissions.h
$(SED) -i "s/USE(PTHREAD_JIT_PERMISSIONS_API)/CPU(ARM64)/g" $(WEBKIT_DIR)/Source/JavaScriptCore/jit/ExecutableAllocator.cpp
$(SED) -i "s/GIGACAGE_ENABLED/0/g" $(WEBKIT_DIR)/Source/WTF/wtf/Gigacage.h

jsc-build-mac-compile:
cd $(WEBKIT_DIR) && ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON -DCMAKE_BUILD_TYPE=relwithdebinfo -DUSE_PTHREAD_JIT_PERMISSIONS_API=ON $(CMAKE_FLAGS_WITHOUT_RELEASE)"
mkdir -p $(WEBKIT_RELEASE_DIR) $(WEBKIT_DIR);
cd $(WEBKIT_RELEASE_DIR) && \
ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" \
CMAKE_BUILD_TYPE=Release cmake \
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON \
-DCMAKE_BUILD_TYPE=Release \
$(WEBKIT_DIR) \
$(WEBKIT_RELEASE_DIR) && \
CFLAGS="$CFLAGS -ffat-lto-objects" CXXFLAGS="$CXXFLAGS -ffat-lto-objects" \
cmake --build $(WEBKIT_RELEASE_DIR) --config Release --target jsc

jsc-build-linux-compile-config:
mkdir -p $(WEBKIT_RELEASE_DIR)
@@ -629,7 +759,7 @@ jsc-build-linux-compile-build:
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc

jsc-build-mac: jsc-build-mac-compile jsc-build-mac-copy
jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile jsc-build-mac-copy

jsc-build-linux: jsc-build-linux-compile-config jsc-build-linux-compile-build jsc-build-mac-copy

@@ -654,7 +784,7 @@ jsc-bindings-mac: $(OBJ_FILES)

# mimalloc is built as object files so that it can overload the system malloc
mimalloc:
cd $(BUN_DEPS_DIR)/mimalloc; cmake $(CMAKE_FLAGS) -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; make;
cd $(BUN_DEPS_DIR)/mimalloc; CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS) -DMI_SKIP_COLLECT_ON_EXIT=ON -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; make;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)

bun-link-lld-debug:
@@ -668,7 +798,6 @@ bun-link-lld-debug:
bun-relink-copy:
cp /tmp/bun-$(PACKAGE_JSON_VERSION).o $(BUN_RELEASE_BIN).o

bun-relink: bun-relink-copy bun-link-lld-release

bun-link-lld-release:
@@ -679,24 +808,46 @@ bun-link-lld-release:
-flto \
-ftls-model=initial-exec \
-O3
rm -rf $(BUN_RELEASE_BIN).dSYM
cp $(BUN_RELEASE_BIN) $(BUN_RELEASE_BIN)-profile

ifeq ($(OS_NAME),darwin)
bun-link-lld-release-dsym:
$(DSYMUTIL) -o $(BUN_RELEASE_BIN).dSYM $(BUN_RELEASE_BIN)
-$(STRIP) $(BUN_RELEASE_BIN)
mv $(BUN_RELEASE_BIN).o /tmp/bun-$(PACKAGE_JSON_VERSION).o

copy-to-bun-release-dir-dsym:
gzip --keep -c $(PACKAGE_DIR)/bun.dSYM > $(BUN_RELEASE_DIR)/bun.dSYM.gz
endif

ifeq ($(OS_NAME),linux)
bun-link-lld-release-dsym:
-$(STRIP) $(BUN_RELEASE_BIN)
mv $(BUN_RELEASE_BIN).o /tmp/bun-$(PACKAGE_JSON_VERSION).o
copy-to-bun-release-dir-dsym:

endif

bun-relink: bun-relink-copy bun-link-lld-release bun-link-lld-release-dsym

# We do this outside of build.zig for performance reasons
# The C compilation stuff with build.zig is really slow and we don't need to run this as often as the rest
$(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX) -c -o $@ $< \
$(CLANG_FLAGS) \
$(CLANG_FLAGS) $(PLATFORM_LINKER_FLAGS) \
-O3 \
-w
-fvectorize \
-w -g

sizegen:
$(CXX) src/javascript/jsc/headergen/sizegen.cpp -o $(BUN_TMP_DIR)/sizegen $(CLANG_FLAGS) -O1
$(BUN_TMP_DIR)/sizegen > src/javascript/jsc/bindings/sizes.zig

picohttp:
$(CC) $(MARCH_NATIVE) $(MACOS_MIN_FLAG) -O3 -g -fPIE -c $(BUN_DEPS_DIR)/picohttpparser/picohttpparser.c -I$(BUN_DEPS_DIR) -o $(BUN_DEPS_OUT_DIR)/picohttpparser.o; cd ../../
$(CC) $(CFLAGS) -O3 -g -fPIC -c $(BUN_DEPS_DIR)/picohttpparser/picohttpparser.c -I$(BUN_DEPS_DIR) -o $(BUN_DEPS_OUT_DIR)/picohttpparser.o; cd ../../

analytics:
./node_modules/.bin/peechy --schema src/analytics/schema.peechy --zig src/analytics/analytics_schema.zig
@@ -818,10 +969,10 @@ run-all-unit-tests:
--cache-dir /tmp/zig-cache-bun-__main_test \
-fallow-shlib-undefined \
$(ARCHIVE_FILES) $(ICU_FLAGS) $(JSC_FILES) $(OBJ_FILES) && \
zig-out/bin/__main_test notARealParam
zig-out/bin/__main_test $(ZIG)

run-unit:
@zig-out/bin/$(testname) -- fake
@zig-out/bin/$(testname) $(ZIG)

@@ -833,8 +984,16 @@ integration-test-dev:
copy-install:
cp src/cli/install.sh ../bun.sh/docs/install.html

copy-to-bun-release-dir:

copy-to-bun-release-dir: copy-to-bun-release-dir-bin copy-to-bun-release-dir-dsym

copy-to-bun-release-dir-bin:
cp -r $(PACKAGE_DIR)/bun $(BUN_RELEASE_DIR)/bun
cp -r $(PACKAGE_DIR)/bun-profile $(BUN_RELEASE_DIR)/bun-profile

PACKAGE_MAP = --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin network_thread $(BUN_DIR)/src/http/network_thread.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-end --pkg-end --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-end --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin clap $(BUN_DIR)/src/deps/zig-clap/clap.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin network_thread $(BUN_DIR)/src/http/network_thread.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-end --pkg-end --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin network_thread $(BUN_DIR)/src/http/network_thread.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-end --pkg-end --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-end --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-end --pkg-begin network_thread $(BUN_DIR)/src/http/network_thread.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-end --pkg-end --pkg-end
|
||||
|
||||
PACKAGE_MAP = --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-end --pkg-end --pkg-end --pkg-end --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin clap $(BUN_DIR)/src/deps/zig-clap/clap.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-end --pkg-end --pkg-end --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/deps/boringssl.zig --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-end
|
||||
|
||||
|
||||
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
|
||||
|
||||
462
README.md
@@ -1,6 +1,6 @@
|
||||
# Bun
|
||||
# bun
|
||||
|
||||
Bun is a new:
|
||||
bun is a new:
|
||||
|
||||
- JavaScript/TypeScript/JSX transpiler
|
||||
- JavaScript & CSS bundler
|
||||
@@ -9,13 +9,13 @@ Bun is a new:
|
||||
- Task runner for package.json scripts
|
||||
- npm-compatible package manager
|
||||
|
||||
All in one fast & easy-to-use tool. Instead of 1,000 node_modules for development, you only need Bun.
|
||||
All in one fast & easy-to-use tool. Instead of 1,000 node_modules for development, you only need bun.
|
||||
|
||||
**Bun is experimental software**. Join [Bun’s Discord](https://bun.sh/discord) for help and have a look at [things that don’t work yet](#things-that-dont-work-yet).
|
||||
**bun is experimental software**. Join [bun’s Discord](https://bun.sh/discord) for help and have a look at [things that don’t work yet](#things-that-dont-work-yet).
|
||||
|
||||
## Install
|
||||
|
||||
Native: (macOS x64 & Silicon, Linux x64, WSL)
|
||||
Native: (macOS x64 & Silicon, Linux x64, Windows Subsystem for Linux)
|
||||
|
||||
```sh
|
||||
curl -fsSL https://bun.sh/install | bash
|
||||
@@ -28,20 +28,23 @@ docker pull jarredsumner/bun:edge
|
||||
docker run --rm --init --ulimit memlock=-1:-1 jarredsumner/bun:edge
|
||||
```
|
||||
|
||||
If using Linux, kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Install](#install)
|
||||
- [Benchmarks](#benchmarks)
|
||||
- [Using Bun as a package manager](#using-bun-as-a-package-manager)
|
||||
- [Using Bun as a task runner](#using-bun-as-a-task-runner)
|
||||
- [Using Bun with Next.js](#using-bun-with-nextjs)
|
||||
- [Using Bun with single page apps](#using-bun-with-single-page-apps)
|
||||
- [Using Bun with Create React App](#using-bun-with-create-react-app)
|
||||
- [Using Bun with TypeScript](#using-bun-with-typescript)
|
||||
- [Using Tailwind with Bun](#using-tailwind-with-bun)
|
||||
- [Using bun as a package manager](#using-bun-as-a-package-manager)
|
||||
- [Using bun as a task runner](#using-bun-as-a-task-runner)
|
||||
- [Using bun with Next.js](#using-bun-with-nextjs)
|
||||
- [Using bun with single page apps](#using-bun-with-single-page-apps)
|
||||
- [Using bun with Create React App](#using-bun-with-create-react-app)
|
||||
- [Using bun with TypeScript](#using-bun-with-typescript)
|
||||
- [Using Tailwind with bun](#using-tailwind-with-bun)
|
||||
- [Things that don’t work yet](#things-that-dont-work-yet)
|
||||
- [Limitations & intended usage](#limitations--intended-usage)
|
||||
- [Configuration](#configuration)
|
||||
- [bunfig.toml](#bunfig-toml)
|
||||
- [Loaders](#loaders)
|
||||
- [CSS in JS](#css-in-js)
|
||||
- [CSS Loader](#css-loader)
|
||||
@@ -54,21 +57,26 @@ docker run --rm --init --ulimit memlock=-1:-1 jarredsumner/bun:edge
|
||||
- [`bun create`](#bun-create)
|
||||
- [`bun bun`](#bun-bun)
|
||||
- [`bun completions`](#bun-completions)
|
||||
- [`Bun.Transpiler`](#buntranspiler)
|
||||
- [`transformSync`](#buntranspilertransformsync)
|
||||
- [`transform`](#buntranspilertransform)
|
||||
- [`scan`](#buntranspilerscan)
|
||||
- [`scanImports`](#buntranspilerscanimports)
|
||||
- [Environment variables](#environment-variables)
|
||||
- [Credits](#credits)
|
||||
- [License](#license)
|
||||
- [Developing Bun](#developing-bun)
|
||||
- [Developing bun](#developing-bun)
|
||||
- [VSCode Dev Container](#vscode-dev-container-linux)
|
||||
- [MacOS](#macos)
|
||||
- [vscode-zig](#vscode-zig)
|
||||
|
||||
## Benchmarks
|
||||
|
||||
**CSS**: [Bun is 14x faster](./bench/hot-module-reloading/css-stress-test) than Next.js at hot reloading CSS. TODO: compare Vite
|
||||
**CSS**: [bun is 14x faster](./bench/hot-module-reloading/css-stress-test) than Next.js at hot reloading CSS. TODO: compare Vite
|
||||
|
||||
**JavaScript**: TODO
|
||||
|
||||
## Using Bun as a package manager
|
||||
## Using bun as a package manager
|
||||
|
||||
On Linux, `bun install` tends to install packages 20x - 100x faster than `npm install`. On macOS, it’s more like 4x - 80x.
|
||||
|
||||
@@ -87,9 +95,25 @@ bun remove react
|
||||
bun add preact
|
||||
```
|
||||
|
||||
## Using Bun as a task runner
|
||||
<details> <summary><strong>For Linux users</strong>: <code>bun install</code> needs Linux Kernel 5.6 or higher to work well</summary>
|
||||
|
||||
Instead of waiting 170ms for your npm client to start for each task, you wait 6ms for Bun.
|
||||
The minimum Linux Kernel version is 5.1. If you're on Linux kernel 5.1 - 5.5, `bun install` should still work, but HTTP requests will be slow due to lack of support for io_uring's `connect()` operation.
|
||||
|
||||
If you're using Ubuntu 20.04, here's how to install a [newer kernel](https://wiki.ubuntu.com/Kernel/LTSEnablementStack):
|
||||
|
||||
```bash
|
||||
# If this returns a version >= 5.6, you don't need to do anything
|
||||
uname -r
|
||||
|
||||
# Install the official Ubuntu hardware enablement kernel
|
||||
sudo apt install --install-recommends linux-generic-hwe-20.04
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
## Using bun as a task runner
|
||||
|
||||
Instead of waiting 170ms for your npm client to start for each task, you wait 6ms for bun.
|
||||
|
||||
To use bun as a task runner, run `bun run` instead of `npm run`.
|
||||
|
||||
@@ -112,9 +136,9 @@ Assuming a package.json with a `"clean"` command in `"scripts"`:
|
||||
}
|
||||
```
|
||||
|
||||
## Using Bun with Next.js
|
||||
## Using bun with Next.js
|
||||
|
||||
To create a new Next.js app with Bun:
|
||||
To create a new Next.js app with bun:
|
||||
|
||||
```bash
|
||||
bun create next ./app
|
||||
@@ -122,7 +146,7 @@ cd app
|
||||
bun
|
||||
```
|
||||
|
||||
To use an existing Next.js app with Bun:
|
||||
To use an existing Next.js app with bun:
|
||||
|
||||
```bash
|
||||
npm install bun-framework-next
|
||||
@@ -142,11 +166,11 @@ Here’s what doesn’t work yet:
|
||||
- API routes, middleware (middleware is easier to support though! similar SSR API)
|
||||
- styled-jsx (technically not Next.js but often used with it)
|
||||
|
||||
When using Next.js, Bun automatically reads configuration from `.env.local`, `.env.development` and `.env` (in that order). `process.env.NEXT_PUBLIC_` and `process.env.NEXT_` automatically are replaced via `--define`.
|
||||
When using Next.js, bun automatically reads configuration from `.env.local`, `.env.development` and `.env` (in that order). `process.env.NEXT_PUBLIC_` and `process.env.NEXT_` are automatically replaced via `--define`.
|
||||
|
||||
Currently, any time you import new dependencies from `node_modules`, you will need to re-run `bun bun --use next`. This will eventually be automatic.
|
||||
|
||||
## Using Bun with single page apps
|
||||
## Using bun with single page apps
|
||||
|
||||
In your project folder root (where `package.json` is):
|
||||
|
||||
@@ -170,7 +194,7 @@ Here are examples of routing from `public/` and how they’re matched:
|
||||
|
||||
If `public/index.html` exists, it becomes the default page instead of a 404 page, unless that pathname has a file extension.
|
||||
|
||||
### Using Bun with Create React App
|
||||
### Using bun with Create React App
|
||||
|
||||
To create a new React app:
|
||||
|
||||
@@ -193,7 +217,7 @@ bun bun ./src/index.js # jsx, tsx, ts also work. can be multiple files
|
||||
bun
|
||||
```
|
||||
|
||||
From there, Bun relies on the filesystem for mapping dev server paths to source files. All URL paths are relative to the project root (where `package.json` is located).
|
||||
From there, bun relies on the filesystem for mapping dev server paths to source files. All URL paths are relative to the project root (where `package.json` is located).
|
||||
|
||||
Here are examples of routing source code file paths:
|
||||
|
||||
@@ -207,17 +231,17 @@ You do not need to include file extensions in `import` paths. CommonJS-style imp
|
||||
|
||||
You can override the public directory by passing `--public-dir="path-to-folder"`.
|
||||
|
||||
If no directory is specified and `./public/` doesn’t exist, Bun will try `./static/`. If `./static/` does not exist, but won’t serve from a public directory. If you pass `--public-dir=./` Bun will serve from the current directory, but it will check the current directory last instead of first.
|
||||
If no directory is specified and `./public/` doesn’t exist, bun will try `./static/`. If `./static/` does not exist either, bun won’t serve from a public directory. If you pass `--public-dir=./`, bun will serve from the current directory, but it will check the current directory last instead of first.
|
||||
|
||||
## Using Bun with TypeScript
|
||||
## Using bun with TypeScript
|
||||
|
||||
TypeScript just works. There’s nothing to configure and nothing extra to install. If you import a `.ts` or `.tsx` file, Bun will transpile it into JavaScript. Bun also transpiles `node_modules` containing `.ts` or `.tsx` files. This is powered by Bun’s TypeScript transpiler, so it’s fast.
|
||||
TypeScript just works. There’s nothing to configure and nothing extra to install. If you import a `.ts` or `.tsx` file, bun will transpile it into JavaScript. bun also transpiles `node_modules` containing `.ts` or `.tsx` files. This is powered by bun’s TypeScript transpiler, so it’s fast.
|
||||
|
||||
Bun also reads `tsconfig.json`, including `baseUrl` and `paths`.
|
||||
bun also reads `tsconfig.json`, including `baseUrl` and `paths`.
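As a rough illustration (the alias and file names below are hypothetical, not from this repo), a `paths` mapping lets imports resolve through the alias:

```ts
// Hypothetical example: with a tsconfig.json containing
//   { "compilerOptions": { "baseUrl": ".", "paths": { "@components/*": ["src/components/*"] } } }
// bun resolves this import to ./src/components/Button.tsx.
import { Button } from "@components/Button";

export function Page() {
  return Button;
}
```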
|
||||
## Using Tailwind with Bun
|
||||
## Using Tailwind with bun
|
||||
|
||||
[Tailwind](https://tailwindcss.com/) is a popular CSS utility framework. Currently, the easiest way to use Tailwind with Bun is through Tailwind’s CLI. That means running both `bun` and `tailwind`, and importing the file `tailwind`’s CLI outputs.
|
||||
[Tailwind](https://tailwindcss.com/) is a popular CSS utility framework. Currently, the easiest way to use Tailwind with bun is through Tailwind’s CLI. That means running both `bun` and `tailwind`, and importing the file `tailwind`’s CLI outputs.
|
||||
|
||||
Tailwind’s docs talk more about [Tailwind’s CLI usage](https://tailwindcss.com/docs/installation#watching-for-changes), but the gist is you’ll want to run this:
|
||||
|
||||
@@ -229,7 +253,7 @@ From there, make sure to import the `dist/tailwind.css` file (or what you chose
|
||||
|
||||
## Things that don’t work yet
|
||||
|
||||
Bun is a project with incredibly large scope, and it’s early days.
|
||||
bun is a project with incredibly large scope, and it’s early days.
|
||||
|
||||
| Feature | Implemented in |
|
||||
| ---------------------------------------------------------------------------------------------------------------------- | --------------- |
|
||||
@@ -244,9 +268,9 @@ Bun is a project with incredibly large scope, and it’s early days.
|
||||
| [TypeScript Decorators](https://www.typescriptlang.org/docs/handbook/decorators.html) | TS Transpiler |
|
||||
| `@jsxPragma` comments | JS Transpiler |
|
||||
| JSX source file name | JS Transpiler |
|
||||
| Sharing `.bun` files | Bun |
|
||||
| [Finish fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) | Bun.js |
|
||||
| [setTimeout](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) | Bun.js |
|
||||
| Sharing `.bun` files | bun |
|
||||
| [Finish fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) | bun.js |
|
||||
| [setTimeout](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) | bun.js |
|
||||
| [workspace: dependencies](https://github.com/Jarred-Sumner/bun/issues/83) | Package manager |
|
||||
| [git: dependencies](https://github.com/Jarred-Sumner/bun/issues/82) | Package manager |
|
||||
| [github: dependencies](https://github.com/Jarred-Sumner/bun/issues/81) | Package manager |
|
||||
@@ -260,29 +284,33 @@ TS Transpiler == TypeScript Transpiler
|
||||
<br/>
|
||||
Package manager == `bun install`
|
||||
<br/>
|
||||
Bun.js == Bun’s JavaScriptCore integration that executes JavaScript. Similar to how Node.js & Deno embed V8.
|
||||
bun.js == bun’s JavaScriptCore integration that executes JavaScript. Similar to how Node.js & Deno embed V8.
|
||||
</small>
|
||||
|
||||
### Limitations & intended usage
|
||||
|
||||
Bun is great for building websites & webapps. For libraries, consider using Rollup or esbuild instead. Bun currently doesn’t minify code and Bun’s dead code elimination doesn’t look beyond the current file.
|
||||
bun is great for building websites & webapps. For libraries, consider using Rollup or esbuild instead. bun currently doesn’t minify code and bun’s dead code elimination doesn’t look beyond the current file.
|
||||
|
||||
Today, Bun is focused on:
|
||||
Today, bun is focused on:
|
||||
|
||||
- Development, not production
|
||||
- Compatibility with existing frameworks & tooling
|
||||
|
||||
Ideally, most projects can use Bun with their existing tooling while making few changes to their codebase. That means using Bun in development, and continuing to use Webpack, esbuild, or another bundler in production. Using two bundlers might sound strange at first, but after all the production-only AST transforms, minification, and special development/production-only imported files...it’s not far from the status quo.
|
||||
Ideally, most projects can use bun with their existing tooling while making few changes to their codebase. That means using bun in development, and continuing to use Webpack, esbuild, or another bundler in production. Using two bundlers might sound strange at first, but after all the production-only AST transforms, minification, and special development/production-only imported files...it’s not far from the status quo.
|
||||
|
||||
Longer-term, Bun intends to replace Node.js, Webpack, Babel, and PostCSS (in production).
|
||||
Longer-term, bun intends to replace Node.js, Webpack, Babel, and PostCSS (in production).
|
||||
|
||||
## Configuration
|
||||
|
||||
### bunfig.toml
|
||||
|
||||
TODO: document this
|
||||
|
||||
### Loaders
|
||||
|
||||
A loader determines how to map imports & file extensions to transforms and output.
|
||||
|
||||
Currently, Bun implements the following loaders:
|
||||
Currently, bun implements the following loaders:
|
||||
|
||||
| Input | Loader | Output |
|
||||
| ----- | ----------------------------- | ------ |
|
||||
@@ -312,7 +340,7 @@ This will disable JSX transforms for `.js` files.
|
||||
|
||||
When importing CSS in JavaScript-like loaders, CSS is treated specially.
|
||||
|
||||
By default, Bun will transform a statement like this:
|
||||
By default, bun will transform a statement like this:
|
||||
|
||||
```js
|
||||
import "../styles/global.css";
|
||||
@@ -336,16 +364,16 @@ An event handler for turning that into a `<link>` is automatically registered wh
|
||||
//@import url("http://localhost:3000/styles/globals.css");
|
||||
```
|
||||
|
||||
Additionally, Bun exposes an API for SSR/SSG that returns a flat list of URLs to css files imported. That function is `Bun.getImportedStyles()`.
|
||||
Additionally, bun exposes an API for SSR/SSG that returns a flat list of URLs to css files imported. That function is `Bun.getImportedStyles()`.
|
||||
|
||||
```ts
|
||||
addEventListener("fetch", async (event: FetchEvent) => {
|
||||
var route = Bun.match(event);
|
||||
var route = Bun.match(event);
|
||||
const App = await import("pages/_app");
|
||||
|
||||
// This returns all .css files that were imported in the line above.
|
||||
// It’s recursive, so any file that imports a CSS file will be included.
|
||||
const appStylesheets = Bun.getImportedStyles();
|
||||
const appStylesheets = Bun.getImportedStyles();
|
||||
|
||||
// ...rest of code
|
||||
});
|
||||
@@ -355,7 +383,7 @@ This is useful for preventing flash of unstyled content.
|
||||
|
||||
### CSS Loader
|
||||
|
||||
Bun bundles `.css` files imported via `@import` into a single file. It doesn’t autoprefix or minify CSS today. Multiple `.css` files imported in one JavaScript file will _not_ be bundled into one file. You’ll have to import those from a `.css` file.
|
||||
bun bundles `.css` files imported via `@import` into a single file. It doesn’t autoprefix or minify CSS today. Multiple `.css` files imported in one JavaScript file will _not_ be bundled into one file. You’ll have to import those from a `.css` file.
|
||||
|
||||
This input:
|
||||
|
||||
@@ -378,13 +406,13 @@ Becomes:
|
||||
|
||||
### CSS runtime
|
||||
|
||||
To support hot CSS reloading, Bun inserts `@supports` annotations into CSS that tag which files a stylesheet is composed of. Browsers ignore this, so it doesn’t impact styles.
|
||||
To support hot CSS reloading, bun inserts `@supports` annotations into CSS that tag which files a stylesheet is composed of. Browsers ignore this, so it doesn’t impact styles.
|
||||
|
||||
By default, Bun’s runtime code automatically listens to `onimportcss` and will insert the `event.detail` into a `<link rel="stylesheet" href={${event.detail}}>` if there is no existing `link` tag with that stylesheet. That’s how Bun’s equivalent of `style-loader` works.
|
||||
By default, bun’s runtime code automatically listens to `onimportcss` and will insert the `event.detail` into a `<link rel="stylesheet" href={${event.detail}}>` if there is no existing `link` tag with that stylesheet. That’s how bun’s equivalent of `style-loader` works.
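A rough sketch of that behavior (not bun’s actual runtime code; the event target and payload shape are assumptions based on the description above):

```ts
// Sketch: insert a <link> for each imported stylesheet, skipping ones already present.
document.addEventListener("onimportcss", (event) => {
  const href = (event as CustomEvent<string>).detail;
  if (!document.querySelector(`link[rel="stylesheet"][href="${href}"]`)) {
    const link = document.createElement("link");
    link.rel = "stylesheet";
    link.href = href;
    document.head.appendChild(link);
  }
});
```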
|
||||
### Frameworks
|
||||
|
||||
Frameworks preconfigure Bun to enable developers to use Bun with their existing tooling.
|
||||
Frameworks preconfigure bun to enable developers to use bun with their existing tooling.
|
||||
|
||||
Frameworks are configured via the `framework` object in the `package.json` of the framework (not in the application’s `package.json`):
|
||||
|
||||
@@ -477,12 +505,12 @@ type Framework = Environment & {
|
||||
// and replaces an imported object with a proxy that mimics CSS module support without doing any class renaming.
|
||||
css?: "onimportcss" | "facade";
|
||||
|
||||
// Bun’s filesystem router
|
||||
// bun’s filesystem router
|
||||
router?: Router;
|
||||
};
|
||||
|
||||
type Define = {
|
||||
// By passing ".env", Bun will automatically load .env.local, .env.development, and .env if exists in the project root
|
||||
// By passing ".env", bun will automatically load .env.local, .env.development, and .env if exists in the project root
|
||||
// (in addition to the processes’ environment variables)
|
||||
// When "*", all environment variables will be automatically injected into the JavaScript loader
|
||||
// When a string like "NEXT_PUBLIC_", only environment variables starting with that prefix will be injected
|
||||
@@ -510,8 +538,8 @@ type Environment = {
|
||||
define?: Define;
|
||||
};
|
||||
|
||||
// Bun’s filesystem router
|
||||
// Currently, Bun supports pages by either an absolute match or a parameter match.
|
||||
// bun’s filesystem router
|
||||
// Currently, bun supports pages by either an absolute match or a parameter match.
|
||||
// pages/index.tsx will be executed on navigation to "/" and "/index"
|
||||
// pages/posts/[id].tsx will be executed on navigation to "/posts/123"
|
||||
// Routes & parameters are automatically passed to `fallback` and `server`.
|
||||
@@ -534,15 +562,15 @@ If you’re interested in adding a framework integration, please reach out. Ther
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Bun not running on an M1 (or Apple Silicon)
|
||||
### bun not running on an M1 (or Apple Silicon)
|
||||
|
||||
If you see a message like this
|
||||
|
||||
> [1] 28447 killed bun create next ./test
|
||||
|
||||
It most likely means you’re running bun’s x64 version on Apple Silicon. This happens if bun is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which Bun indirectly uses.
|
||||
It most likely means you’re running bun’s x64 version on Apple Silicon. This happens if bun is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which bun indirectly uses.
|
||||
|
||||
The fix is to ensure you installed a version of Bun built for Apple Silicon.
|
||||
The fix is to ensure you installed a version of bun built for Apple Silicon.
|
||||
|
||||
### error: Unexpected
|
||||
|
||||
@@ -550,14 +578,14 @@ If you see an error like this:
|
||||
|
||||

|
||||
|
||||
It usually means the max number of open file descriptors is being explicitly set to a low number. By default, Bun requests the max number of file descriptors available (which on macOS, is something like 32,000). But, if you previously ran into ulimit issues with e.g. Chokidar, someone on The Internet may have advised you to run `ulimit -n 8096`.
|
||||
It usually means the max number of open file descriptors is being explicitly set to a low number. By default, bun requests the max number of file descriptors available (which on macOS, is something like 32,000). But, if you previously ran into ulimit issues with e.g. Chokidar, someone on The Internet may have advised you to run `ulimit -n 8096`.
|
||||
|
||||
That advice unfortunately **lowers** the hard limit to `8096`. This can be a problem in large repositories or projects with lots of dependencies. Chokidar (and other watchers) don’t seem to call `setrlimit`, which means they’re reliant on the (much lower) soft limit.
|
||||
|
||||
To fix this issue:
|
||||
|
||||
1. Remove any scripts that call `ulimit -n` and restart your shell.
|
||||
2. Try again, and if the error still occurs, try setting `ulimit -n` to an absurdly high number, such as `ulimit -n 65542`
2. Try again, and if the error still occurs, try setting `ulimit -n` to an absurdly high number, such as `ulimit -n 2147483646`
|
||||
3. Try again, and if that still doesn’t fix it, open an issue
|
||||
|
||||
## Reference
|
||||
@@ -579,19 +607,19 @@ Environment variables
|
||||
| BUN_CONFIG_SKIP_LOAD_LOCKFILE | Don’t load a lockfile |
|
||||
| BUN_CONFIG_SKIP_INSTALL_PACKAGES | Don’t install any packages |
|
||||
|
||||
Bun always tries to use the fastest available installation method for the target platform. On macOS, that’s `clonefile` and on Linux, that’s `hardlink`. You can change which installation method is used with the `--backend` flag. When unavailable or on error, `clonefile` and `hardlink` fall back to a platform-specific implementation of copying files.
bun always tries to use the fastest available installation method for the target platform. On macOS, that’s `clonefile` and on Linux, that’s `hardlink`. You can change which installation method is used with the `--backend` flag. When unavailable or on error, `clonefile` and `hardlink` fall back to a platform-specific implementation of copying files.
|
||||
|
||||
Bun stores installed packages from npm in `~/.bun/install/cache/${name}@${version}`. Note that if the semver version has a `build` or a `pre` tag, it is replaced with a hash of that value instead. This is to reduce chances of errors from long file paths, but unfortunately complicates figuring out where a package was installed on disk.
|
||||
bun stores installed packages from npm in `~/.bun/install/cache/${name}@${version}`. Note that if the semver version has a `build` or a `pre` tag, it is replaced with a hash of that value instead. This is to reduce chances of errors from long file paths, but unfortunately complicates figuring out where a package was installed on disk.
|
||||
When the `node_modules` folder exists, before installing, Bun checks if the `"name"` and `"version"` in `package/package.json` in the expected node_modules folder matches the expected `name` and `version`. This is how it determines whether or not it should install. It uses a custom JSON parser which stops parsing as soon as it finds `"name"` and `"version"`.
|
||||
When the `node_modules` folder exists, before installing, bun checks if the `"name"` and `"version"` in `package/package.json` in the expected node_modules folder matches the expected `name` and `version`. This is how it determines whether or not it should install. It uses a custom JSON parser which stops parsing as soon as it finds `"name"` and `"version"`.
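A hedged sketch of that check (bun uses its own minimal JSON parser that stops early; `JSON.parse` below is only for illustration):

```ts
import { readFileSync } from "node:fs";
import { join } from "node:path";

// Returns true when the expected package is already installed, so installing can be skipped.
function alreadyInstalled(nodeModules: string, name: string, version: string): boolean {
  try {
    const pkg = JSON.parse(
      readFileSync(join(nodeModules, name, "package.json"), "utf8")
    );
    return pkg.name === name && pkg.version === version;
  } catch {
    return false; // missing or unreadable package.json means it needs installing
  }
}
```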
|
||||
When a `bun.lockb` doesn’t exist or `package.json` has changed dependencies, tarballs are downloaded & extracted eagerly while resolving.
|
||||
|
||||
When a `bun.lockb` exists and `package.json` hasn’t changed, Bun downloads missing dependencies lazily. If the package with a matching `name` & `version` already exists in the expected location within `node_modules`, Bun won’t attempt to download the tarball.
|
||||
When a `bun.lockb` exists and `package.json` hasn’t changed, bun downloads missing dependencies lazily. If the package with a matching `name` & `version` already exists in the expected location within `node_modules`, bun won’t attempt to download the tarball.
|
||||
|
||||
#### Platform-specific dependencies?
|
||||
|
||||
Bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won’t change between platforms/architectures even if the packages ultimately installed do change.
|
||||
bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won’t change between platforms/architectures even if the packages ultimately installed do change.
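Conceptually, the runtime filter looks something like the sketch below (field handling follows npm’s `os`/`cpu` convention; this is an assumption, not bun’s source):

```ts
// Decide whether a resolved package should actually be downloaded, extracted,
// and installed on the current platform/architecture.
function enabledOnThisTarget(pkg: { os?: string[]; cpu?: string[] }): boolean {
  const allows = (list: string[] | undefined, actual: string): boolean =>
    !list ||
    list.length === 0 ||
    list.some((entry) =>
      entry.startsWith("!") ? entry.slice(1) !== actual : entry === actual
    );
  return allows(pkg.os, process.platform) && allows(pkg.cpu, process.arch);
}

enabledOnThisTarget({ os: ["darwin", "linux"], cpu: ["x64", "arm64"] });
```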
|
||||
#### Peer dependencies?
|
||||
|
||||
@@ -599,11 +627,11 @@ Peer dependencies are handled similarly to yarn. `bun install` does not automati
|
||||
|
||||
#### Lockfile
|
||||
|
||||
`bun.lockb` is Bun’s binary lockfile format.
|
||||
`bun.lockb` is bun’s binary lockfile format.
|
||||
|
||||
#### Why is it binary?
|
||||
|
||||
In a word: Performance. Bun’s lockfile saves & loads incredibly quickly, and saves a lot more data than what is typically inside lockfiles.
|
||||
In a word: Performance. bun’s lockfile saves & loads incredibly quickly, and saves a lot more data than what is typically inside lockfiles.
|
||||
|
||||
#### How do I inspect it?
|
||||
|
||||
@@ -627,14 +655,14 @@ rm -rf ~/.bun/install/cache
|
||||
|
||||
#### npm registry metadata
|
||||
|
||||
Bun uses a binary format for caching NPM registry responses. This loads much faster than JSON and tends to be smaller on disk.
|
||||
bun uses a binary format for caching NPM registry responses. This loads much faster than JSON and tends to be smaller on disk.
|
||||
You will see these files in `~/.bun/install/cache/*.npm`. The filename pattern is `${hash(packageName)}.npm`. It’s a hash so that extra directories don’t need to be created for scoped packages
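For example (the hash choice is illustrative, not bun’s actual algorithm), a scoped package still maps to a single flat file:

```ts
import { createHash } from "node:crypto";
import { homedir } from "node:os";

// "@babel/core" hashes to one flat "<hash>.npm" file, so no "@babel/" directory is needed.
function metadataCacheFile(packageName: string): string {
  const hash = createHash("sha1").update(packageName).digest("hex").slice(0, 16);
  return `${homedir()}/.bun/install/cache/${hash}.npm`;
}

metadataCacheFile("@babel/core");
```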
|
||||
Bun’s usage of `Cache-Control` ignores `Age`. This improves performance, but means Bun may be about 5 minutes out of date with the latest package version metadata from npm.
bun’s usage of `Cache-Control` ignores `Age`. This improves performance, but means bun may be about 5 minutes out of date with the latest package version metadata from npm.
|
||||
|
||||
### `bun run`
|
||||
|
||||
`bun run` is a fast `package.json` scripts runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for Bun.
|
||||
`bun run` is a fast `package.json` scripts runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
|
||||
|
||||
By default, `bun run` prints the script that will be invoked:
|
||||
|
||||
@@ -673,7 +701,7 @@ react-scripts eject
|
||||
4 scripts
|
||||
```
|
||||
|
||||
`bun run` automatically loads environment variables from `.env` into the shell/task. `.env` files are loaded with the same priority as the rest of Bun, so that means:
|
||||
`bun run` automatically loads environment variables from `.env` into the shell/task. `.env` files are loaded with the same priority as the rest of bun, so that means:
|
||||
|
||||
1. `.env.local` is first
|
||||
2. if (`$NODE_ENV` === `"production"`) `.env.production` else `.env.development`
|
||||
@@ -691,7 +719,7 @@ bun run relay-compiler
|
||||
|
||||
# You can also do this, but:
|
||||
# - It will only lookup packages in `node_modules/.bin` instead of `$PATH`
|
||||
# - It will start Bun’s dev server if the script name doesn’t exist (`bun` starts the dev server by default)
|
||||
# - It will start bun’s dev server if the script name doesn’t exist (`bun` starts the dev server by default)
|
||||
bun relay-compiler
|
||||
```
|
||||
|
||||
@@ -702,7 +730,7 @@ To pass additional flags through to the task or executable, there are two ways:
|
||||
bun run relay-compiler -- --help
|
||||
|
||||
# Implicit: if you do not include "--", anything *after* the script name will be passed through
|
||||
# Bun flags are parsed first, which means e.g. `bun run relay-compiler --help` will print Bun’s help instead of relay-compiler’s help.
|
||||
# bun flags are parsed first, which means e.g. `bun run relay-compiler --help` will print bun’s help instead of relay-compiler’s help.
|
||||
bun run relay-compiler --schema foo.graphql
|
||||
```
|
||||
|
||||
@@ -750,13 +778,13 @@ bun create https://github.com/github-user/repo-name destination
|
||||
bun create github.com/github-user/repo-name destination
|
||||
```
|
||||
|
||||
Note: you don’t need `bun create` to use Bun. You don’t need any configuration at all. This command exists to make it a little easier.
|
||||
Note: you don’t need `bun create` to use bun. You don’t need any configuration at all. This command exists to make it a little easier.
|
||||
|
||||
#### Local templates
|
||||
|
||||
If you have your own boilerplate you prefer using, copy it into `$HOME/.bun-create/my-boilerplate-name`.
|
||||
|
||||
Before checking Bun’s examples folder, `bun create` checks for a local folder matching the input in:
|
||||
Before checking bun’s examples folder, `bun create` checks for a local folder matching the input in:
|
||||
|
||||
- `$BUN_CREATE_DIR/`
|
||||
- `$HOME/.bun-create/`
|
||||
@@ -946,7 +974,7 @@ export var $eb6819b = $$m({
|
||||
// ... rest of code
|
||||
```
|
||||
|
||||
This makes bundled modules [position-independent](https://en.wikipedia.org/wiki/Position-independent_code). In theory, one could import only the exact modules in-use without reparsing code and without generating a new bundle. One bundle can dynamically become many bundles comprising only the modules in use on the webpage. Thanks to the metadata with the byte offsets, a web server can send each module to browsers [zero-copy](https://en.wikipedia.org/wiki/Zero-copy) using [sendfile](https://man7.org/linux/man-pages/man2/sendfile.2.html). Bun itself is not quite this smart yet, but these optimizations would be useful in production and potentially very useful for React Server Components.
|
||||
This makes bundled modules [position-independent](https://en.wikipedia.org/wiki/Position-independent_code). In theory, one could import only the exact modules in-use without reparsing code and without generating a new bundle. One bundle can dynamically become many bundles comprising only the modules in use on the webpage. Thanks to the metadata with the byte offsets, a web server can send each module to browsers [zero-copy](https://en.wikipedia.org/wiki/Zero-copy) using [sendfile](https://man7.org/linux/man-pages/man2/sendfile.2.html). bun itself is not quite this smart yet, but these optimizations would be useful in production and potentially very useful for React Server Components.
|
||||
|
||||
To see the schema inside, have a look at [`JavascriptBundleContainer`](./src/api/schema.d.ts#:~:text=export%20interface-,JavascriptBundleContainer,-%7B). You can find JavaScript bindings to read the metadata in [src/api/schema.js](./src/api/schema.js). This is not really an API yet. It’s missing the part where it gets the binary data from the bottom of the file. Someday, I want this to be usable by other tools too.
|
||||
|
||||
@@ -968,7 +996,7 @@ This works because every `.bun` file starts with this:
|
||||
#!/usr/bin/env bun
|
||||
```
|
||||
|
||||
To deploy to production with Bun, you’ll want to get the code from the `.bun` file and stick that somewhere your web server can find it (or if you’re using Vercel or a Rails app, in a `public` folder).
|
||||
To deploy to production with bun, you’ll want to get the code from the `.bun` file and stick that somewhere your web server can find it (or if you’re using Vercel or a Rails app, in a `public` folder).
|
||||
|
||||
Note that `.bun` is a binary file format, so just opening it in VSCode or vim might render strangely.
|
||||
|
||||
@@ -987,7 +1015,7 @@ To force bun to bundle packages which are not located in a `node_modules` folder
|
||||
}
|
||||
```
|
||||
|
||||
Bundled dependencies are not eligible for Hot Module Reloading. The code is served to browsers & Bun.js verbatim. But, in the future, it may be sectioned off into only parts of the bundle being used. That’s possible in the current version of the `.bun` file (so long as you know which files are necessary), but it’s not implemented yet. Longer-term, it will include all `import` and `export` of each module inside.
|
||||
Bundled dependencies are not eligible for Hot Module Reloading. The code is served to browsers & bun.js verbatim. But, in the future, it may be sectioned off into only parts of the bundle being used. That’s possible in the current version of the `.bun` file (so long as you know which files are necessary), but it’s not implemented yet. Longer-term, it will include all `import` and `export` of each module inside.
|
||||
|
||||
#### What is the module ID hash?
|
||||
|
||||
@@ -1003,7 +1031,7 @@ Is generated like this:
|
||||
2. Wyhash 64 of the `package.hash` + `package_path`. `package_path` means "relative to the root of the npm package, where is the module imported?". For example, if you imported `react/jsx-dev-runtime.js`, the `package_path` is `jsx-dev-runtime.js`. `react-dom/cjs/react-dom.development.js` would be `cjs/react-dom.development.js`
|
||||
3. Truncate the hash generated above to a `u32`
|
||||
|
||||
The implementation details of this module ID hash will vary between versions of Bun. The important part is the metadata contains the module IDs, the package paths, and the package hashes so it shouldn’t really matter in practice if other tooling wants to make use of any of this.
|
||||
The implementation details of this module ID hash will vary between versions of bun. The important part is the metadata contains the module IDs, the package paths, and the package hashes so it shouldn’t really matter in practice if other tooling wants to make use of any of this.
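In sketch form, the derivation from the list above reads like this (`wyhash64` is assumed to be provided elsewhere, and as noted the real details vary between bun versions):

```ts
// Assumed helper: a 64-bit Wyhash implementation provided elsewhere (not part of this doc).
declare function wyhash64(input: string): bigint;

// Steps 2 and 3 above: hash package.hash + package_path, then truncate to a u32.
function moduleIdHash(packageHash: string, packagePath: string): number {
  const hash64 = wyhash64(packageHash + packagePath);
  return Number(hash64 & 0xffff_ffffn);
}

// e.g. moduleIdHash(reactPackageHash, "jsx-dev-runtime.js") — both arguments hypothetical
```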
|
||||
### `bun completions`
|
||||
|
||||
@@ -1011,37 +1039,260 @@ This command installs completions for `zsh` and/or `fish`. It’s run automatica
|
||||
|
||||
If you want to copy the completions manually, run `bun completions > path-to-file`. If you know the completions directory to install them to, run `bun completions /path/to/directory`.
|
||||
|
||||
### `Bun.Transpiler`
|
||||
|
||||
`Bun.Transpiler` lets you use Bun's transpiler from JavaScript (available in Bun.js)
|
||||
|
||||
````ts
|
||||
type Loader = "jsx" | "js" | "ts" | "tsx";
|
||||
|
||||
interface TranspilerOptions {
|
||||
// Replace key with value. Value must be a JSON string.
|
||||
// @example
|
||||
// ```
|
||||
// { "process.env.NODE_ENV": "\"production\"" }
|
||||
// ```
|
||||
define: Record<string, string>,
|
||||
|
||||
// What is the default loader used for this transpiler?
|
||||
loader: Loader,
|
||||
|
||||
// What platform are we targeting? This may affect how import and/or require is used
|
||||
platform: "browser" | "bun" | "macro" | "node",
|
||||
|
||||
// TSConfig.json file as stringified JSON or an object
|
||||
// Use this to set a custom JSX factory, fragment, or import source
|
||||
// For example, if you want to use Preact instead of React. Or if you want to use Emotion.
|
||||
tsconfig: string | TSConfig,
|
||||
|
||||
// Replace imports with macros
|
||||
macros: MacroMap,
|
||||
}
|
||||
|
||||
// This lets you use macros
|
||||
interface MacroMap {
|
||||
// @example
|
||||
// ```
|
||||
// {
|
||||
// "react-relay": {
|
||||
// "graphql": "bun-macro-relay/bun-macro-relay.tsx"
|
||||
// }
|
||||
// }
|
||||
// ```
|
||||
[packagePath: string]: {
|
||||
[importItemName: string]: string,
|
||||
},
|
||||
}
|
||||
|
||||
class Bun.Transpiler {
|
||||
constructor(options: TranspilerOptions)
|
||||
|
||||
transform(code: string, loader?: Loader): Promise<string>
|
||||
transformSync(code: string, loader?: Loader): string
|
||||
|
||||
scan(code: string): {exports: string[], imports: Import}
|
||||
scanImports(code: string): Import[]
|
||||
}
|
||||
|
||||
type Import = {
|
||||
path: string,
|
||||
kind:
|
||||
// import foo from 'bar'; in JavaScript
|
||||
| "import-statement"
|
||||
// require("foo") in JavaScript
|
||||
| "require-call"
|
||||
// require.resolve("foo") in JavaScript
|
||||
| "require-resolve"
|
||||
// Dynamic import() in JavaScript
|
||||
| "dynamic-import"
|
||||
// @import() in CSS
|
||||
| "import-rule"
|
||||
// url() in CSS
|
||||
| "url-token"
|
||||
// The import was injected by Bun
|
||||
| "internal"
|
||||
// Entry point
|
||||
// Probably won't see this one
|
||||
| "entry-point"
|
||||
}
|
||||
|
||||
const transpiler = new Bun.Transpiler({ loader: "jsx" });
|
||||
````
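Building on the options above, here is a hedged example of passing `tsconfig` and `macros`; the Preact settings and the `bun-macro-relay` path mirror the comments in the type definitions, so treat the exact values as illustrative:

```ts
const transpilerWithOptions = new Bun.Transpiler({
  loader: "tsx",

  // Custom JSX via tsconfig, e.g. to target Preact instead of React.
  tsconfig: JSON.stringify({
    compilerOptions: {
      jsx: "react-jsx",
      jsxImportSource: "preact",
    },
  }),

  // Replace the `graphql` export of react-relay with a macro at build time.
  macros: {
    "react-relay": {
      graphql: "bun-macro-relay/bun-macro-relay.tsx",
    },
  },
});

transpilerWithOptions.transformSync("export default <div>hi!</div>;");
```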
|
||||
#### `Bun.Transpiler.transformSync`
|
||||
|
||||
This lets you transpile JavaScript, TypeScript, TSX, and JSX using Bun's transpiler. It does not resolve modules.
|
||||
|
||||
It is synchronous and runs in the same thread as other JavaScript code.
|
||||
|
||||
```js
|
||||
const transpiler = new Bun.Transpiler({ loader: "jsx" });
|
||||
transpiler.transformSync("<div>hi!</div>");
|
||||
```
|
||||
|
||||
```js
|
||||
import { __require as require } from "bun:wrap";
|
||||
import * as JSX from "react/jsx-dev-runtime";
|
||||
var jsx = require(JSX).jsxDEV;
|
||||
|
||||
export default jsx(
|
||||
"div",
|
||||
{
|
||||
children: "hi!",
|
||||
},
|
||||
undefined,
|
||||
false,
|
||||
undefined,
|
||||
this
|
||||
);
|
||||
```
|
||||
|
||||
If a macro is used, it will be run in the same thread as the transpiler, but in a separate event loop from the rest of your application. Currently, globals between macros and regular code are shared, which means it is possible (but not recommended) to share state between macros and regular code. Attempting to use AST nodes outside of a macro is undefined behavior.
|
||||
|
||||
#### `Bun.Transpiler.transform`
|
||||
|
||||
This lets you transpile JavaScript, TypeScript, TSX, and JSX using Bun's transpiler. It does not resolve modules.
|
||||
|
||||
It is async and automatically runs in Bun's worker threadpool. That means if you run it 100 times, the work is spread across `Math.floor($cpu_count * 0.8)` threads (for example, 8 threads on a 10-core machine) without blocking the main JavaScript thread.
|
||||
|
||||
If code uses a macro, it will potentially spawn a new copy of Bun.js' JavaScript runtime environment in that new thread.
|
||||
|
||||
Unless you're transpiling _many_ large files, you should probably use `Bun.Transpiler.transformSync`. The overhead of dispatching to the threadpool often exceeds the time spent actually transpiling the code.
|
||||
|
||||
```js
|
||||
const transpiler = new Bun.Transpiler({ loader: "jsx" });
|
||||
await transpiler.transform("<div>hi!</div>");
|
||||
```
|
||||
|
||||
```js
|
||||
import { __require as require } from "bun:wrap";
|
||||
import * as JSX from "react/jsx-dev-runtime";
|
||||
var jsx = require(JSX).jsxDEV;
|
||||
|
||||
export default jsx(
|
||||
"div",
|
||||
{
|
||||
children: "hi!",
|
||||
},
|
||||
undefined,
|
||||
false,
|
||||
undefined,
|
||||
this
|
||||
);
|
||||
```
|
||||
|
||||
You can also pass a `Loader` as a string
|
||||
|
||||
```js
|
||||
await transpiler.transform("<div>hi!</div>", "tsx");
|
||||
```
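Because `transform` returns a promise, many sources can be transpiled concurrently — a small sketch (the file names are hypothetical):

```ts
import { readFileSync } from "node:fs";

const manyFilesTranspiler = new Bun.Transpiler({ loader: "tsx" });
const files = ["a.tsx", "b.tsx", "c.tsx"]; // hypothetical inputs

// Each call is dispatched to the worker threadpool, so these run in parallel.
const outputs = await Promise.all(
  files.map((path) => manyFilesTranspiler.transform(readFileSync(path, "utf8")))
);
```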
|
||||
#### `Bun.Transpiler.scan`
|
||||
|
||||
This is a fast way to get a list of imports & exports used in a JavaScript/jsx or TypeScript/tsx file.
|
||||
|
||||
This function is synchronous.
|
||||
|
||||
```ts
|
||||
const transpiler = new Bun.Transpiler({ loader: "ts" });
|
||||
|
||||
transpiler.scan(`
|
||||
import React from 'react';
|
||||
import Remix from 'remix';
|
||||
import type {ReactNode} from 'react';
|
||||
|
||||
export const loader = () => import('./loader');
|
||||
`);
|
||||
```
|
||||
|
||||
```ts
|
||||
{
|
||||
"exports": [
|
||||
"loader"
|
||||
],
|
||||
"imports": [
|
||||
{
|
||||
"kind": "import-statement",
|
||||
"path": "react"
|
||||
},
|
||||
{
|
||||
"kind": "import-statement",
|
||||
"path": "remix"
|
||||
},
|
||||
{
|
||||
"kind": "dynamic-import",
|
||||
"path": "./loader"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
#### `Bun.Transpiler.scanImports`
|
||||
|
||||
This is a fast path for getting a list of imports used in a JavaScript/jsx or TypeScript/tsx file. It skips the visiting pass, which means it is faster but less accurate. You usually won't notice a difference between `Bun.Transpiler.scan` and `Bun.Transpiler.scanImports`. You might notice it for very large files (megabytes).
|
||||
|
||||
This function is synchronous.
|
||||
|
||||
```ts
|
||||
const transpiler = new Bun.Transpiler({ loader: "ts" });
|
||||
|
||||
transpiler.scanImports(`
|
||||
import React from 'react';
|
||||
import Remix from 'remix';
|
||||
import type {ReactNode} from 'react';
|
||||
|
||||
export const loader = () => import('./loader');
|
||||
`);
|
||||
```
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"kind": "import-statement",
|
||||
"path": "react"
|
||||
},
|
||||
{
|
||||
"kind": "import-statement",
|
||||
"path": "remix"
|
||||
},
|
||||
{
|
||||
"kind": "dynamic-import",
|
||||
"path": "./loader"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Environment variables
|
||||
|
||||
- `GOMAXPROCS`: For `bun bun`, this sets the maximum number of threads to use. If you’re experiencing an issue with `bun bun`, try setting `GOMAXPROCS=1` to force bun to run single-threaded
|
||||
- `DISABLE_BUN_ANALYTICS=1` this disables Bun’s analytics. Bun records bundle timings (so we can answer with data, "is bun getting faster?") and feature usage (e.g. "are people actually using macros?"). The request body size is about 60 bytes, so it’s not a lot of data
|
||||
- `DISABLE_BUN_ANALYTICS=1` this disables bun’s analytics. bun records bundle timings (so we can answer with data, "is bun getting faster?") and feature usage (e.g. "are people actually using macros?"). The request body size is about 60 bytes, so it’s not a lot of data
|
||||
- `TMPDIR`: Before `bun bun` completes, it stores the new `.bun` in `$TMPDIR`. If unset, `TMPDIR` defaults to the platform-specific temporary directory (on Linux, `/tmp` and on macOS `/private/tmp`)
|
||||
|
||||
## Credits
|
||||
|
||||
- While written in Zig instead of Go, Bun’s JS transpiler, CSS lexer, and node module resolver source code is based off of @evanw’s esbuild project. @evanw did a fantastic job with esbuild.
|
||||
- While written in Zig instead of Go, bun’s JS transpiler, CSS lexer, and node module resolver source code is based off of @evanw’s esbuild project. @evanw did a fantastic job with esbuild.
|
||||
- The idea for the name "bun" came from [@kipply](https://github.com/kipply)
|
||||
|
||||
## License
|
||||
|
||||
Bun itself is MIT-licensed.
|
||||
bun itself is MIT-licensed.
|
||||
|
||||
However, JavaScriptCore (and WebKit) is LGPL-2 and Bun statically links it.
|
||||
However, JavaScriptCore (and WebKit) is LGPL-2 and bun statically links it.
|
||||
|
||||
Per LGPL2:
|
||||
|
||||
> (1) If you statically link against an LGPL’d library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application.
|
||||
|
||||
You can find the patched version of WebKit used by Bun here: <https://github.com/jarred-sumner/webkit>. If you would like to relink Bun with changes:
|
||||
You can find the patched version of WebKit used by bun here: <https://github.com/jarred-sumner/webkit>. If you would like to relink bun with changes:
|
||||
|
||||
- `git submodule update --init --recursive`
|
||||
- `make jsc`
|
||||
- `zig build`
|
||||
|
||||
This compiles JavaScriptCore, compiles Bun’s `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore) and outputs a new `bun` binary with your changes.
|
||||
This compiles JavaScriptCore, compiles bun’s `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore) and outputs a new `bun` binary with your changes.
|
||||
|
||||
To successfully run `zig build`, you will need to install a patched version of Zig available here: <https://github.com/jarred-sumner/zig/tree/jarred/zig-sloppy>.
|
||||
|
||||
Bun also statically links these libraries:
|
||||
bun also statically links these libraries:
|
||||
|
||||
- `libicu` 66.1, which can be found here: <https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE>
|
||||
- [`picohttp`](https://github.com/h2o/picohttpparser), which is dual-licensed under the Perl License or the MIT License
|
||||
@@ -1051,7 +1302,7 @@ Bun also statically links these libraries:
|
||||
- [`libarchive`](https://github.com/libarchive/libarchive), which has [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING)
|
||||
- [`libiconv`](https://www.gnu.org/software/libiconv/), which is LGPL2. It’s a dependency of libarchive.
|
||||
|
||||
For compatibility reasons, these NPM packages are embedded into Bun’s binary and injected if imported.
For compatibility reasons, these NPM packages are embedded into bun’s binary and injected if imported.
|
||||
|
||||
- [`assert`](https://npmjs.com/package/assert) (MIT license)
|
||||
- [`browserify-zlib`](https://npmjs.com/package/browserify-zlib) (MIT license)
|
||||
@@ -1075,7 +1326,7 @@ For compatibiltiy reasons, these NPM packages are embedded into Bun’s binary a
|
||||
- [`util`](https://npmjs.com/package/util) (MIT license)
|
||||
- [`vm-browserify`](https://npmjs.com/package/vm-browserify) (MIT license)
|
||||
|
||||
## Developing Bun
|
||||
## Developing bun
|
||||
|
||||
Estimated: 30-90 minutes :(
|
||||
|
||||
@@ -1098,7 +1349,7 @@ devcontainer build
|
||||
devcontainer open
|
||||
```
|
||||
|
||||
You will need to clone the GitHub repository inside that container, which also requires authenticating with GitHub (until Bun's repository is public). Make sure to login with a Personal Access Token rather than a web browser.
|
||||
You will need to clone the GitHub repository inside that container, which also requires authenticating with GitHub (until bun's repository is public). Make sure to login with a Personal Access Token rather than a web browser.
|
||||
|
||||
Inside the container, run this:
|
||||
|
||||
@@ -1113,7 +1364,7 @@ git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init
|
||||
# Compile bun dependencies (zig is already compiled)
|
||||
make devcontainer
|
||||
|
||||
# Build Bun for development
|
||||
# Build bun for development
|
||||
make dev
|
||||
|
||||
# Run bun
|
||||
@@ -1124,44 +1375,31 @@ It is very similar to my own development environment.
|
||||
|
||||
### MacOS
|
||||
|
||||
Install LLVM 12 and homebrew dependencies:
|
||||
Install LLVM 13 and homebrew dependencies:
|
||||
|
||||
```bash
|
||||
brew install llvm@12 coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config
|
||||
brew install llvm@13 coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config
|
||||
```
|
||||
|
||||
Bun (& the version of Zig) need LLVM 12 and Clang 12 (clang is part of LLVM). Weird build & runtime errors will happen otherwise.
|
||||
bun (& the version of Zig) need LLVM 13 and Clang 13 (clang is part of LLVM). Weird build & runtime errors will happen otherwise.
|
||||
|
||||
Make sure LLVM 12 is in your `$PATH`:
|
||||
Make sure LLVM 13 is in your `$PATH`:
|
||||
|
||||
```bash
|
||||
which clang-12
|
||||
which clang-13
|
||||
```
|
||||
|
||||
If it is not, you will have to run this to link it:
|
||||
|
||||
```bash
|
||||
export PATH=$(brew --prefix llvm@12)/bin:$PATH
|
||||
export LDFLAGS="$LDFLAGS -L$(brew --prefix llvm@12)/lib"
|
||||
export CPPFLAGS="$CPPFLAGS -I$(brew --prefix llvm@12)/include"
|
||||
export PATH=$(brew --prefix llvm@13)/bin:$PATH
|
||||
export LDFLAGS="$LDFLAGS -L$(brew --prefix llvm@13)/lib"
|
||||
export CPPFLAGS="$CPPFLAGS -I$(brew --prefix llvm@13)/include"
|
||||
```
|
||||
|
||||
On fish that looks like `fish_add_path (brew --prefix llvm@12)/bin`
|
||||
On fish that looks like `fish_add_path (brew --prefix llvm@13)/bin`
|
||||
|
||||
#### Compile Zig (macOS)
|
||||
|
||||
```bash
|
||||
git clone https://github.com/jarred-sumner/zig
|
||||
cd zig
|
||||
git checkout jarred/zig-sloppy-with-small-structs
|
||||
cmake . -DCMAKE_PREFIX_PATH=$(brew --prefix llvm@12) -DZIG_STATIC_LLVM=ON -DCMAKE_BUILD_TYPE=Release && make -j 16
|
||||
```
|
||||
|
||||
Note that `brew install zig` won’t work. Bun uses a build of Zig with a couple patches.
|
||||
|
||||
Additionally, you’ll need `cmake`, `npm` and `esbuild` installed globally.
|
||||
|
||||
You’ll want to make sure `zig` is in `$PATH`. The `zig` binary will be in the same folder as the newly-cloned `zig` repo. If you use fish, you can run `fish_add_path (pwd)`.
|
||||
You’ll want to make sure `zig` is in `$PATH`. The specific version of Zig expected is the HEAD in [Jarred-Sumner/zig](https://github.com/Jarred-Sumner/zig).
|
||||
|
||||
#### Build bun (macOS)
|
||||
|
||||
|
||||
@@ -4,10 +4,10 @@ This benchmarks bundler performance for CSS hot reloading.
|
||||
|
||||
## Results
|
||||
|
||||
Bun is 14x faster than Next.js at hot reloading CSS.
|
||||
bun is 14x faster than Next.js at hot reloading CSS.
|
||||
|
||||
```
|
||||
Bun v0.0.34
|
||||
bun v0.0.34
|
||||
Saving every 16ms
|
||||
|
||||
Frame time:
|
||||
@@ -40,7 +40,7 @@ The intent is to be as accurate as possible. Measuring times reported client-sid
|
||||
|
||||
It works like this:
|
||||
|
||||
1. `browser.js` loads either Bun or Next.js and a Chromium instance opened to the correct webpage
|
||||
1. `browser.js` loads either bun or Next.js and opens a Chromium instance pointed at the correct webpage
|
||||
2. `color-looper.zig` updates [`./src/colors.css`](./src/colors.css) in a loop up to `1024` times (1024 is arbitrary), sleeping every `16`ms or `32`ms (a CLI arg you can pass it). The `var(--timestamp)` CSS variable contains the UTC timestamp with precision of milliseconds and one extra decimal point
|
||||
3. `color-looper.zig` automatically records the screen via `screencapture` (builtin on macOS) and saves it, along with a `BigUint64Array` containing all the expected timestamps. When it's done, it writes to a designated file on disk which `browser.js` picks up as the signal to close the browser.
|
||||
4. `ffmpeg` converts each frame into a black and white `.tif` file, which `tesseract` then OCRs
|
||||
|
||||
78
bench/snippets/callbacks-overhead.mjs
Normal file
@@ -0,0 +1,78 @@
|
||||
const iterations = 1_000;
|
||||
|
||||
export var report = {
|
||||
async: 0,
|
||||
callback: 0,
|
||||
sync: 0,
|
||||
then: 0,
|
||||
};
|
||||
|
||||
const tests = {
|
||||
callback(n, cb) {
|
||||
if (n === iterations) return cb();
|
||||
tests.callback(1 + n, () => cb());
|
||||
},
|
||||
|
||||
sync(n) {
|
||||
if (n === iterations) return;
|
||||
|
||||
tests.sync(1 + n);
|
||||
},
|
||||
|
||||
async async(n) {
|
||||
if (n === iterations) return;
|
||||
|
||||
await tests.async(1 + n);
|
||||
},
|
||||
|
||||
then(n) {
|
||||
if (n === iterations) return;
|
||||
return Promise.resolve(1 + n).then(tests.then);
|
||||
},
|
||||
};
|
||||
|
||||
async function test(log) {
|
||||
{
|
||||
const a = performance.now();
|
||||
await tests.async(0);
|
||||
if (log)
|
||||
console.log(
|
||||
`async/await: ${(report.async = (performance.now() - a).toFixed(4))}ms`
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
const a = performance.now();
|
||||
tests.callback(0, function () {
|
||||
if (log)
|
||||
console.log(
|
||||
`callback: ${(report.callback = (performance.now() - a).toFixed(
|
||||
4
|
||||
))}ms`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const a = performance.now();
|
||||
await tests.then(0);
|
||||
if (log)
|
||||
console.log(
|
||||
`then: ${(report.then = (performance.now() - a).toFixed(4))}ms`
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
const a = performance.now();
|
||||
tests.sync(0);
|
||||
if (log)
|
||||
console.log(
|
||||
`sync: ${(report.sync = (performance.now() - a).toFixed(4))}ms`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let warmup = 10;
|
||||
while (warmup--) await test();
|
||||
|
||||
await test(true);
|
||||
5
bench/snippets/cat.mjs
Normal file
@@ -0,0 +1,5 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
const arg = process.argv.slice(1);
|
||||
for (let i = 0; i < count; i++)
|
||||
console.log(arg.map((file) => readFileSync(file, "utf8")).join(""));
|
||||
3
bench/snippets/copyfile.mjs
Normal file
@@ -0,0 +1,3 @@
|
||||
import { copyFileSync } from "node:fs";
|
||||
const arg = process.argv.slice(2);
|
||||
copyFileSync(arg[0], arg[1]);
|
||||
6
bench/snippets/exists.js
Normal file
@@ -0,0 +1,6 @@
|
||||
const { existsSync } = require("fs");
|
||||
const cwd = process.cwd();
|
||||
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
|
||||
for (let i = 0; i < count; i++) existsSync(cwd);
|
||||
12
bench/snippets/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "snippets",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.16.10",
|
||||
"@babel/preset-react": "^7.16.7",
|
||||
"@swc/core": "^1.2.133",
|
||||
"esbuild": "^0.14.12"
|
||||
}
|
||||
}
|
||||
4
bench/snippets/realpath.mjs
Normal file
@@ -0,0 +1,4 @@
|
||||
import { realpathSync } from "node:fs";
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
const arg = process.argv[process.argv.length - 1];
|
||||
for (let i = 0; i < count; i++) realpathSync(arg);
|
||||
BIN
bench/snippets/scanner/bun.lockb
Executable file
Binary file not shown.
6
bench/snippets/scanner/package.json
Normal file
@@ -0,0 +1,6 @@
{
  "name": "scan",
  "dependencies": {
    "esbuild": "^0.14.11"
  }
}
15
bench/snippets/scanner/remix-route.ts
Normal file
@@ -0,0 +1,15 @@
import { useParams } from "remix";
import type { LoaderFunction, ActionFunction } from "remix";

export const loader: LoaderFunction = async ({ params }) => {
  console.log(params.postId);
};

export const action: ActionFunction = async ({ params }) => {
  console.log(params.postId);
};

export default function PostRoute() {
  const params = useParams();
  console.log(params.postId);
}
15
bench/snippets/scanner/scan-imports-only.js
Normal file
@@ -0,0 +1,15 @@
import { readFileSync } from "fs";
const fixture = ["action", "default", "loader"];

const transpiler = new Bun.Transpiler({
  loader: "ts",
});

console.time("Get exports");
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
for (let i = 0; i < ITERATIONS; i++) {
  const imports = transpiler.scanImports(
    readFileSync("remix-route.ts", "utf8")
  );
}
console.timeEnd("Get exports");
21
bench/snippets/scanner/scan.bun.js
Normal file
@@ -0,0 +1,21 @@
import { readFileSync } from "fs";
const fixture = ["action", "default", "loader"];
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;

const transpiler = new Bun.Transpiler({
  loader: "ts",
});

console.time("Get exports");
const file = readFileSync("remix-route.ts", "utf8");
for (let i = 0; i < ITERATIONS; i++) {
  const { imports, exports } = transpiler.scan(file);

  for (let j = 0; j < fixture.length; j++) {
    if (fixture[j] !== exports[j]) {
      throw new Error("Mismatch");
    }
  }
}

console.timeEnd("Get exports");
42
bench/snippets/scanner/scan.node-esbuild.mjs
Normal file
@@ -0,0 +1,42 @@
|
||||
import { build, buildSync } from "esbuild";
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
|
||||
const opts = {
|
||||
metafile: true,
|
||||
format: "esm",
|
||||
platform: "neutral",
|
||||
write: false,
|
||||
logLevel: "silent",
|
||||
stdin: {
|
||||
contents: readFileSync("remix-route.ts", "utf8"),
|
||||
loader: "ts",
|
||||
sourcefile: "remix-route.js",
|
||||
},
|
||||
};
|
||||
|
||||
const getExports = ({ metafile }) => {
|
||||
for (let i = 0; i < fixture.length; i++) {
|
||||
if (fixture[i] !== metafile.outputs["stdin.js"].exports[i]) {
|
||||
throw new Error("Mismatch");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
console.time("Get exports");
|
||||
|
||||
if (!process.env.SYNC) {
|
||||
var promises = new Array(ITERATIONS);
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
promises[i] = build(opts).then(getExports);
|
||||
}
|
||||
|
||||
await Promise.all(promises);
|
||||
} else {
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
getExports(buildSync(opts));
|
||||
}
|
||||
}
|
||||
|
||||
console.timeEnd("Get exports");
|
||||
58
bench/snippets/transpiler.mjs
Normal file
@@ -0,0 +1,58 @@
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
var transformSync;
|
||||
var transform;
|
||||
var opts;
|
||||
if (process.isBun) {
|
||||
const transpiler = new Bun.Transpiler({ loader: "jsx" });
|
||||
transformSync = transpiler.transformSync.bind(transpiler);
|
||||
transform = transpiler.transform.bind(transpiler);
|
||||
opts = "jsx";
|
||||
} else if (process.env["esbuild"]) {
|
||||
try {
|
||||
const esbuild = await import("esbuild");
|
||||
transformSync = esbuild.transformSync;
|
||||
transform = esbuild.transform;
|
||||
opts = { loader: "jsx" };
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
} else if (process.env["swc"]) {
|
||||
try {
|
||||
const swc = await import("@swc/core");
|
||||
transformSync = swc.transformSync;
|
||||
transform = swc.transform;
|
||||
opts = {
|
||||
sourceMaps: false,
|
||||
inlineSourcesContent: false,
|
||||
jsc: {
|
||||
target: "es2022",
|
||||
parser: {
|
||||
jsx: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
} else if (process.env["babel"]) {
|
||||
try {
|
||||
const swc = await import("@babel/core");
|
||||
transformSync = swc.transformSync;
|
||||
transform = swc.transform;
|
||||
opts = {
|
||||
sourceMaps: false,
|
||||
presets: [(await import("@babel/preset-react")).default],
|
||||
};
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
}
|
||||
|
||||
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
|
||||
|
||||
if (process.env.ASYNC) {
|
||||
console.log(await transform(code, opts));
|
||||
} else {
|
||||
console.log(transformSync(code, opts));
|
||||
}
|
||||
141
build.zig
@@ -40,7 +40,7 @@ const color_map = std.ComptimeStringMap([]const u8, .{
|
||||
&.{ "yellow", "33m" },
|
||||
});
|
||||
|
||||
fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: *std.mem.Allocator, target: anytype) !void {
|
||||
fn addInternalPackages(step: *std.build.LibExeObjStep, _: std.mem.Allocator, target: anytype) !void {
|
||||
var boringssl: std.build.Pkg = .{
|
||||
.name = "boringssl",
|
||||
.path = pkgPath("src/deps/boringssl.zig"),
|
||||
@@ -51,6 +51,11 @@ fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: *std.mem.Alloc
|
||||
.path = pkgPath("src/thread_pool.zig"),
|
||||
};
|
||||
|
||||
var crash_reporter: std.build.Pkg = .{
|
||||
.name = "crash_reporter",
|
||||
.path = pkgPath("src/deps/backtrace.zig"),
|
||||
};
|
||||
|
||||
var picohttp: std.build.Pkg = .{
|
||||
.name = "picohttp",
|
||||
.path = pkgPath("src/deps/picohttp.zig"),
|
||||
@@ -85,21 +90,40 @@ fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: *std.mem.Alloc
|
||||
.path = pkgPath("src/http_client_async.zig"),
|
||||
};
|
||||
|
||||
var network_thread: std.build.Pkg = .{
|
||||
.name = "network_thread",
|
||||
.path = pkgPath("src/http/network_thread.zig"),
|
||||
var javascript_core: std.build.Pkg = .{
|
||||
.name = "javascript_core",
|
||||
.path = pkgPath("src/jsc.zig"),
|
||||
};
|
||||
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
var analytics: std.build.Pkg = .{
|
||||
.name = "analytics",
|
||||
.path = pkgPath("src/analytics.zig"),
|
||||
};
|
||||
|
||||
network_thread.dependencies = &.{
|
||||
io.dependencies = &.{analytics};
|
||||
|
||||
javascript_core.dependencies = &.{ http, strings, picohttp, io };
|
||||
http.dependencies = &.{
|
||||
strings,
|
||||
picohttp,
|
||||
io,
|
||||
boringssl,
|
||||
thread_pool,
|
||||
};
|
||||
http.dependencies = &.{ io, network_thread, strings, boringssl, picohttp };
|
||||
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
http.dependencies = &.{ io, network_thread, thread_pool, strings, boringssl, picohttp };
|
||||
http.dependencies = &.{
|
||||
strings,
|
||||
picohttp,
|
||||
io,
|
||||
boringssl,
|
||||
thread_pool,
|
||||
};
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
|
||||
thread_pool.dependencies = &.{
|
||||
io,
|
||||
http,
|
||||
};
|
||||
|
||||
step.addPackage(thread_pool);
|
||||
step.addPackage(picohttp);
|
||||
@@ -107,8 +131,9 @@ fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: *std.mem.Alloc
|
||||
step.addPackage(strings);
|
||||
step.addPackage(clap);
|
||||
step.addPackage(http);
|
||||
step.addPackage(network_thread);
|
||||
step.addPackage(boringssl);
|
||||
step.addPackage(javascript_core);
|
||||
step.addPackage(crash_reporter);
|
||||
}
|
||||
var output_dir: []const u8 = "";
|
||||
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
|
||||
@@ -154,7 +179,6 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
|
||||
mode = b.standardReleaseOptions();
|
||||
|
||||
var cwd_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
|
||||
const cwd: []const u8 = b.pathFromRoot(".");
|
||||
var exe: *std.build.LibExeObjStep = undefined;
|
||||
var output_dir_buf = std.mem.zeroes([4096]u8);
|
||||
@@ -168,10 +192,12 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
if (std.mem.eql(u8, os_tagname, "macos")) {
|
||||
os_tagname = "darwin";
|
||||
if (arch.isAARCH64()) {
|
||||
target.os_version_min = std.build.Target.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
} else if (arch.isX86()) {
|
||||
target.os_version_min = std.build.Target.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
|
||||
}
|
||||
} else if (target.isLinux()) {
|
||||
target.setGnuLibCVersion(2, 27, 0);
|
||||
}
|
||||
|
||||
std.mem.copy(
|
||||
@@ -195,67 +221,12 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
|
||||
output_dir = b.pathFromRoot(output_dir_base);
|
||||
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
|
||||
|
||||
if (target.getOsTag() == .wasi) {
|
||||
exe.enable_wasmtime = true;
|
||||
exe = b.addExecutable(bun_executable_name, "src/main_wasi.zig");
|
||||
exe.linkage = .dynamic;
|
||||
exe.setOutputDir(output_dir);
|
||||
} else if (target.getCpuArch().isWasm()) {
|
||||
// exe = b.addExecutable(
|
||||
// "bun",
|
||||
// "src/main_wasm.zig",
|
||||
// );
|
||||
// exe.is_linking_libc = false;
|
||||
// exe.is_dynamic = true;
|
||||
var lib = b.addExecutable(bun_executable_name, "src/main_wasm.zig");
|
||||
lib.single_threaded = true;
|
||||
// exe.want_lto = true;
|
||||
// exe.linkLibrary(lib);
|
||||
|
||||
if (mode == std.builtin.Mode.Debug) {
|
||||
// exception_handling
|
||||
var features = target.getCpuFeatures();
|
||||
features.addFeature(2);
|
||||
target.updateCpuFeatures(&features);
|
||||
} else {
|
||||
// lib.strip = true;
|
||||
}
|
||||
|
||||
lib.setOutputDir(output_dir);
|
||||
lib.want_lto = true;
|
||||
b.install_path = lib.getOutputSource().getPath(b);
|
||||
|
||||
std.debug.print("Build: ./{s}\n", .{b.install_path});
|
||||
b.default_step.dependOn(&lib.step);
|
||||
b.verbose_link = true;
|
||||
lib.setTarget(target);
|
||||
lib.setBuildMode(mode);
|
||||
|
||||
std.fs.deleteTreeAbsolute(std.fs.path.join(b.allocator, &.{ cwd, lib.getOutputSource().getPath(b) }) catch unreachable) catch {};
|
||||
var install = b.getInstallStep();
|
||||
lib.strip = false;
|
||||
lib.install();
|
||||
|
||||
const run_cmd = lib.run();
|
||||
run_cmd.step.dependOn(b.getInstallStep());
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
|
||||
return;
|
||||
} else {
|
||||
exe = b.addExecutable(bun_executable_name, "src/main.zig");
|
||||
}
|
||||
exe = b.addExecutable(bun_executable_name, "src/main.zig");
|
||||
// exe.setLibCFile("libc.txt");
|
||||
exe.linkLibC();
|
||||
// exe.linkLibCpp();
|
||||
|
||||
exe.setOutputDir(output_dir);
|
||||
var cwd_dir = std.fs.cwd();
|
||||
updateRuntime() catch {};
|
||||
|
||||
exe.setTarget(target);
|
||||
@@ -266,19 +237,17 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
typings_exe.setMainPkgPath(b.pathFromRoot("."));
|
||||
|
||||
// exe.want_lto = true;
|
||||
defer b.default_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
|
||||
defer b.default_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
.{
|
||||
triplet,
|
||||
target.getOsVersionMin().semver,
|
||||
target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
|
||||
{
|
||||
b.default_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}",
|
||||
.{
|
||||
triplet,
|
||||
target.getOsVersionMin().semver,
|
||||
target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
}
|
||||
|
||||
var obj_step = b.step("obj", "Build Bun as a .o file");
|
||||
var obj_step = b.step("obj", "Build bun as a .o file");
|
||||
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
|
||||
|
||||
{
|
||||
@@ -312,14 +281,17 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
|
||||
obj.strip = false;
|
||||
obj.bundle_compiler_rt = true;
|
||||
obj.omit_frame_pointer = false;
|
||||
|
||||
b.default_step.dependOn(&obj.step);
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
|
||||
var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
|
||||
log_step.step.dependOn(&obj.step);
|
||||
}
|
||||
@@ -422,7 +394,7 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar
|
||||
var dirs_to_search = std.BoundedArray([]const u8, 32).init(0) catch unreachable;
|
||||
const arm_brew_prefix: []const u8 = "/opt/homebrew";
|
||||
const x86_brew_prefix: []const u8 = "/usr/local";
|
||||
try dirs_to_search.append(b.env_map.get("BUN_DEPS_DIR") orelse @as([]const u8, b.pathFromRoot("src/deps")));
|
||||
try dirs_to_search.append(b.env_map.get("BUN_DEPS_OUT_DIR") orelse b.env_map.get("BUN_DEPS_DIR") orelse @as([]const u8, b.pathFromRoot("src/deps")));
|
||||
if (target.getOsTag() == .macos) {
|
||||
if (target.getCpuArch().isAARCH64()) {
|
||||
try dirs_to_search.append(comptime arm_brew_prefix ++ "/opt/icu4c/lib/");
|
||||
@@ -450,10 +422,11 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar
|
||||
.{ "libJavaScriptCore.a", "libJavaScriptCore.a" },
|
||||
.{ "libWTF.a", "libWTF.a" },
|
||||
.{ "libbmalloc.a", "libbmalloc.a" },
|
||||
.{ "libbacktrace.a", "libbacktrace.a" },
|
||||
});
|
||||
|
||||
for (dirs_to_search.slice()) |deps_path| {
|
||||
var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch |err| @panic("Failed to open dependencies directory");
|
||||
var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch @panic("Failed to open dependencies directory");
|
||||
var iterator = deps_dir.iterate();
|
||||
|
||||
while (iterator.next() catch null) |entr| {
|
||||
@@ -476,6 +449,7 @@ pub fn configureObjectStep(obj: *std.build.LibExeObjStep, target: anytype, main_
|
||||
try addInternalPackages(obj, std.heap.page_allocator, target);
|
||||
addPicoHTTP(obj, false);
|
||||
|
||||
obj.strip = false;
|
||||
obj.setOutputDir(output_dir);
|
||||
obj.setBuildMode(mode);
|
||||
obj.linkLibC();
|
||||
@@ -485,6 +459,7 @@ pub fn configureObjectStep(obj: *std.build.LibExeObjStep, target: anytype, main_
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,14 +94,14 @@ complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'react' -d 'new React project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'upgrade' -d 'Upgrade Bun to the latest version' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'upgrade' -d 'Upgrade bun to the latest version' -x
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '--help' -d 'See all commands and flags' -x
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'Bun\'s version' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'bun\'s version' -x
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open Bun\'s Discord server' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open bun\'s Discord server' -x
|
||||
|
||||
|
||||
complete -c bun \
|
||||
|
||||
@@ -15,9 +15,9 @@ _bun() {
|
||||
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
|
||||
compadd $scripts_list && ret=0
|
||||
|
||||
main_commands=('add\:"Add a dependency to package.json" bun\:"Generate a bundle" create\:"Create a new project" dev\:"Start a dev server" help\:"Show command help" install\:"Install packages from package.json" remove\:"Remove a dependency from package.json" run\:"Run a script or package bin" upgrade\:"Upgrade to the latest version of Bun"')
|
||||
main_commands=('add\:"Add a dependency to package.json" bun\:"Generate a bundle" create\:"Create a new project" dev\:"Start a dev server" help\:"Show command help" install\:"Install packages from package.json" remove\:"Remove a dependency from package.json" run\:"Run a script or package bin" upgrade\:"Upgrade to the latest version of bun"')
|
||||
main_commands=($main_commands)
|
||||
_alternative "args:Bun:(($main_commands))"
|
||||
_alternative "args:bun:(($main_commands))"
|
||||
;;
|
||||
args)
|
||||
case $line[1] in
|
||||
|
||||
@@ -58,7 +58,7 @@ subcommands:
|
||||
summary: Use a framework, e.g. "next"
|
||||
|
||||
upgrade:
|
||||
summary: Upgrade to the latest version of Bun
|
||||
summary: Upgrade to the latest version of bun
|
||||
|
||||
dev:
|
||||
summary: Start a dev server
|
||||
|
||||
44
docs/bun-flavored-toml.md
Normal file
@@ -0,0 +1,44 @@
# Bun-flavored TOML

[TOML](https://toml.io/) is a minimal configuration file format designed to be easy for humans to read.

Bun implements a TOML parser with a few tweaks designed for better interoperability with INI files and with JavaScript.

### ; and # are comments

In Bun-flavored TOML, comments start with `#` or `;`:

```ini
# This is a comment
; This is also a comment
```

This matches the behavior of INI files.

In TOML, comments start with `#`:

```toml
# This is a comment
```

### String escape characters

Bun-flavored TOML adds a few more escape sequences to TOML to work better with JavaScript strings (see the JavaScript illustration after this list).

```
# Bun-flavored TOML extras
\x{XX} - ASCII (U+00XX)
\u{x+} - unicode (U+0000000X) - (U+XXXXXXXX)
\v - vertical tab

# Regular TOML
\b - backspace (U+0008)
\t - tab (U+0009)
\n - linefeed (U+000A)
\f - form feed (U+000C)
\r - carriage return (U+000D)
\" - quote (U+0022)
\\ - backslash (U+005C)
\uXXXX - unicode (U+XXXX)
\UXXXXXXXX - unicode (U+XXXXXXXX)
```
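For illustration, the snippet below shows the JavaScript string escapes that the Bun-flavored extras above are modeled on. It exercises plain JavaScript string literals only (it does not call the TOML parser), and the sample values are arbitrary.

```js
// JavaScript escapes corresponding to the Bun-flavored TOML extras above.
const samples = {
  hexByte: "\x41", // like \x{XX}: an ASCII code unit -> "A" (U+0041)
  codePoint: "\u{1f600}", // like \u{x+}: any Unicode code point -> "😀"
  verticalTab: "\v", // vertical tab (U+000B)
};

for (const [name, value] of Object.entries(samples)) {
  console.log(name, JSON.stringify(value), value.codePointAt(0).toString(16));
}
```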
@@ -1,7 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
# The important part of this test: make sure that Bun.js successfully loads
|
||||
# The most likely reason for this test to fail is that something broke in the JavaScriptCore <> Bun integration
|
||||
# The important part of this test: make sure that bun.js successfully loads
|
||||
# The most likely reason for this test to fail is that something broke in the JavaScriptCore <> bun integration
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
|
||||
rm -rf /tmp/next-app
|
||||
|
||||
53
integration/apps/bun-dev-index-html.sh
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
|
||||
dir=$(mktemp -d --suffix=bun-dev-check)
|
||||
|
||||
index_content="<html><body>index.html</body></html>"
|
||||
bacon_content="<html><body>bacon.html</body></html>"
|
||||
js_content="console.log('hi')"
|
||||
|
||||
echo $index_content >"$dir/index.html"
|
||||
echo $js_content >"$dir/index.js"
|
||||
echo $bacon_content >"$dir/bacon.html"
|
||||
|
||||
cd $dir
|
||||
|
||||
$BUN_BIN --port 8087 &
|
||||
sleep 0.005
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index.html)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/foo/foo)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/foo/foo)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$bacon_content', got '$(curl --fail http://localhost:8087/bacon)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon.html)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$bacon_content', got '$(curl --fail http://localhost:8087/bacon.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
echo "✅ bun dev index html check passed."
|
||||
55
integration/apps/bun-dev.sh
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
|
||||
dir=$(mktemp -d --suffix=bun-dev-check)
|
||||
|
||||
index_content="<html><body>index.html</body></html>"
|
||||
bacon_content="<html><body>bacon.html</body></html>"
|
||||
js_content="console.log('hi')"
|
||||
|
||||
mkdir -p $dir/public
|
||||
|
||||
echo $index_content >"$dir/public/index.html"
|
||||
echo $js_content >"$dir/index.js"
|
||||
echo $bacon_content >"$dir/public/bacon.html"
|
||||
|
||||
cd $dir
|
||||
|
||||
$BUN_BIN --port 8087 &
|
||||
sleep 0.005
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/index.html)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/index.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/foo/foo)" != "$index_content" ]; then
|
||||
echo "ERR: Expected '$index_content', got '$(curl --fail http://localhost:8087/foo/foo)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$bacon_content', got '$(curl --fail http://localhost:8087/bacon)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(curl --fail http://localhost:8087/bacon.html)" != "$bacon_content" ]; then
|
||||
echo "ERR: Expected '$bacon_content', got '$(curl --fail http://localhost:8087/bacon.html)'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
killall -9 $(basename $BUN_BIN) || echo ""
|
||||
echo "✅ bun dev public directory index html check passed."
|
||||
79
integration/apps/bun-install.sh
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
dir=$(mktemp -d --suffix=bun-install-test-1)
|
||||
|
||||
cd $dir
|
||||
${NPM_CLIENT:-$(which bun)} add react react-dom @types/react
|
||||
|
||||
echo "console.log(typeof require(\"react\").createElement);" >index.js
|
||||
chmod +x index.js
|
||||
|
||||
JS_RUNTIME=${JS_RUNTIME:-"$(which bun)"}
|
||||
|
||||
if [ "$JS_RUNTIME" == "node" ]; then
|
||||
result="$(node ./index.js)"
|
||||
fi
|
||||
|
||||
if [ "$JS_RUNTIME" != "node" ]; then
|
||||
result="$($JS_RUNTIME run ./index.js)"
|
||||
fi
|
||||
|
||||
echo "console.log(typeof require(\"react-dom\").render);" >index.js
|
||||
chmod +x index.js
|
||||
|
||||
JS_RUNTIME=${JS_RUNTIME:-"$(which bun)"}
|
||||
|
||||
if [ "$JS_RUNTIME" == "node" ]; then
|
||||
result="$(node ./index.js)"
|
||||
fi
|
||||
|
||||
if [ "$JS_RUNTIME" != "node" ]; then
|
||||
result="$($JS_RUNTIME run ./index.js)"
|
||||
fi
|
||||
|
||||
if [ "$result" != "function" ]; then
|
||||
echo "ERR: Expected 'function', got '$result'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${NPM_CLIENT:-$(which bun)} remove react-dom
|
||||
|
||||
if [ -d "node_modules/react-dom" ]; then
|
||||
echo "ERR: react-dom module still exists in $dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
yarn_dot_lock=$(${NPM_CLIENT:-$(which bun)} bun.lockb)
|
||||
|
||||
if echo "$yarn_dot_lock" | grep -q "react-dom"; then
|
||||
echo "ERR: react-dom module still exists in lockfile"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${NPM_CLIENT:-$(which bun)} remove @types/react
|
||||
|
||||
yarn_dot_lock=$(${NPM_CLIENT:-$(which bun)} bun.lockb)
|
||||
|
||||
if echo "$yarn_dot_lock" | grep -q "@types/react"; then
|
||||
echo "ERR: @types/react module still exists in lockfile"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if echo "$yarn_dot_lock" | grep -q "@types/react"; then
|
||||
echo "ERR: @types/react module still exists in $dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${NPM_CLIENT:-$(which bun)} remove react
|
||||
|
||||
if [ -d "node_modules/react" ]; then
|
||||
echo "ERR: react module still exists in $dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -d "bun.lockb" ]; then
|
||||
echo "ERR: empty bun.lockb should be deleted"
|
||||
exit 1
|
||||
fi
|
||||
@@ -0,0 +1 @@
|
||||
node_modules/always-bundled-module
|
||||
@@ -0,0 +1,10 @@
|
||||
module.exports = {
|
||||
default: 0xdeadbeef,
|
||||
default() {
|
||||
return "ok";
|
||||
},
|
||||
default: true,
|
||||
ok() {
|
||||
return true;
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,5 @@
|
||||
const __esModule = true;
|
||||
|
||||
export const foo = () => __esModule;
|
||||
|
||||
export { __esModule, foo as default };
|
||||
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "always-bundled-module",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
13
integration/bunjs-only-snippets/bundled/entrypoint.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import "i-am-bundled/cjs";
|
||||
import "i-am-bundled/esm";
|
||||
import "always-bundled-module/esm";
|
||||
import "always-bundled-module/cjs";
|
||||
import { foo } from "i-am-bundled/esm";
|
||||
import { foo as foo2 } from "always-bundled-module/esm";
|
||||
import cJS from "always-bundled-module/cjs";
|
||||
|
||||
foo();
|
||||
foo2();
|
||||
cJS();
|
||||
|
||||
export default cJS();
|
||||
12
integration/bunjs-only-snippets/bundled/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "to-bundle",
|
||||
"scripts": {
|
||||
"prebundle": "rm -rf node_modules; cp -r to_bundle_node_modules node_modules; ln -s always-bundled-module node_modules/always-bundled-module",
|
||||
"bundle": "${BUN_BIN:-$(which bun)} bun ./entrypoint.ts"
|
||||
},
|
||||
"bun": {
|
||||
"alwaysBundle": [
|
||||
"always-bundled-module"
|
||||
]
|
||||
}
|
||||
}
|
||||
10
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/cjs.js
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
module.exports = {
|
||||
default: 0xdeadbeef,
|
||||
default() {
|
||||
return "ok";
|
||||
},
|
||||
default: true,
|
||||
ok() {
|
||||
return true;
|
||||
},
|
||||
};
|
||||
5
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/esm.js
generated
Normal file
@@ -0,0 +1,5 @@
|
||||
const __esModule = true;
|
||||
|
||||
export const foo = () => __esModule;
|
||||
|
||||
export { __esModule, foo as default };
|
||||
4
integration/bunjs-only-snippets/bundled/to_bundle_node_modules/i-am-bundled/package.json
generated
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "i-am-bundled",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
6
integration/bunjs-only-snippets/bundled/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"paths": {},
|
||||
"baseUrl": "."
|
||||
}
|
||||
}
|
||||
58
integration/bunjs-only-snippets/console-log.js
Normal file
@@ -0,0 +1,58 @@
|
||||
console.log("Hello World!");
|
||||
console.log(123);
|
||||
console.log(-123);
|
||||
console.log(123.567);
|
||||
console.log(-123.567);
|
||||
console.log(true);
|
||||
console.log(false);
|
||||
console.log(null);
|
||||
console.log(undefined);
|
||||
console.log(Symbol("Symbol Description"));
|
||||
console.log(new Date(2021, 12, 30, 666, 777, 888, 999));
|
||||
console.log([123, 456, 789]);
|
||||
console.log({ a: 123, b: 456, c: 789 });
|
||||
console.log({
|
||||
a: {
|
||||
b: {
|
||||
c: 123,
|
||||
},
|
||||
bacon: true,
|
||||
},
|
||||
});
|
||||
|
||||
console.log(new Promise(() => {}));
|
||||
|
||||
class Foo {}
|
||||
|
||||
console.log(() => {});
|
||||
console.log(Foo);
|
||||
console.log(new Foo());
|
||||
console.log(function foooo() {});
|
||||
|
||||
console.log(/FooRegex/);
|
||||
|
||||
console.error("uh oh");
|
||||
console.time("Check");
|
||||
|
||||
console.log(
|
||||
"Is it a bug or a feature that formatting numbers like %d is colored",
|
||||
123
|
||||
);
|
||||
console.log(globalThis);
|
||||
|
||||
console.log(
|
||||
"String %s should be 2nd word, 456 == %s and percent s %s == %s",
|
||||
"123",
|
||||
"456",
|
||||
"%s",
|
||||
"What",
|
||||
"okay"
|
||||
);
|
||||
|
||||
const infiniteLoop = {
|
||||
foo: {},
|
||||
bar: {},
|
||||
};
|
||||
|
||||
infiniteLoop.bar = infiniteLoop;
|
||||
console.log(infiniteLoop, "am");
|
||||
17
integration/bunjs-only-snippets/fetch.js
Normal file
@@ -0,0 +1,17 @@
|
||||
import fs from "fs";
|
||||
|
||||
const urls = ["https://example.com", "http://example.com"];
|
||||
for (let url of urls) {
|
||||
const response = await fetch(url);
|
||||
const text = await response.text();
|
||||
|
||||
if (
|
||||
fs.readFileSync(
|
||||
import.meta.path.substring(0, import.meta.path.lastIndexOf("/")) +
|
||||
"/fetch.js.txt",
|
||||
"utf8"
|
||||
) !== text
|
||||
) {
|
||||
throw new Error("Expected fetch.js.txt to match snapshot");
|
||||
}
|
||||
}
|
||||
46
integration/bunjs-only-snippets/fetch.js.txt
Normal file
@@ -0,0 +1,46 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Example Domain</title>
|
||||
|
||||
<meta charset="utf-8" />
|
||||
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<style type="text/css">
|
||||
body {
|
||||
background-color: #f0f0f2;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
font-family: -apple-system, system-ui, BlinkMacSystemFont, "Segoe UI", "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;
|
||||
|
||||
}
|
||||
div {
|
||||
width: 600px;
|
||||
margin: 5em auto;
|
||||
padding: 2em;
|
||||
background-color: #fdfdff;
|
||||
border-radius: 0.5em;
|
||||
box-shadow: 2px 3px 7px 2px rgba(0,0,0,0.02);
|
||||
}
|
||||
a:link, a:visited {
|
||||
color: #38488f;
|
||||
text-decoration: none;
|
||||
}
|
||||
@media (max-width: 700px) {
|
||||
div {
|
||||
margin: 0 auto;
|
||||
width: auto;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div>
|
||||
<h1>Example Domain</h1>
|
||||
<p>This domain is for use in illustrative examples in documents. You may use this
|
||||
domain in literature without prior coordination or asking for permission.</p>
|
||||
<p><a href="https://www.iana.org/domains/example">More information...</a></p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
23
integration/bunjs-only-snippets/fs-stream.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import { createReadStream, createWriteStream, readFileSync } from "fs";
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
createReadStream("fs-stream.js")
|
||||
.pipe(createWriteStream("/tmp/fs-stream.copy.js"))
|
||||
.once("error", (err) => reject(err))
|
||||
.once("finish", () => {
|
||||
try {
|
||||
const copied = readFileSync("/tmp/fs-stream.copy.js", "utf8");
|
||||
const real = readFileSync("fs-stream.js", "utf8");
|
||||
if (copied !== real) {
|
||||
reject(
|
||||
new Error("fs-stream.js is not the same as fs-stream.copy.js")
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
resolve(true);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
67
integration/bunjs-only-snippets/fs.test.js
Normal file
@@ -0,0 +1,67 @@
|
||||
import { describe, it, expect } from "bun:test";
|
||||
import { mkdirSync, existsSync, readFileSync, writeFileSync } from "node:fs";
|
||||
|
||||
describe("mkdirSync", () => {
|
||||
it("should create a directory", () => {
|
||||
const tempdir = `/tmp/fs.test.js/${Date.now()}/1234/hi`;
|
||||
expect(existsSync(tempdir)).toBe(false);
|
||||
expect(tempdir.includes(mkdirSync(tempdir, { recursive: true }))).toBe(
|
||||
true
|
||||
);
|
||||
expect(existsSync(tempdir)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("readFileSync", () => {
|
||||
it("works", () => {
|
||||
const text = readFileSync(import.meta.dir + "/readFileSync.txt", "utf8");
|
||||
expect(text).toBe("File read successfully");
|
||||
});
|
||||
|
||||
it("returning Uint8Array works", () => {
|
||||
const text = readFileSync(import.meta.dir + "/readFileSync.txt");
|
||||
const encoded = [
|
||||
70, 105, 108, 101, 32, 114, 101, 97, 100, 32, 115, 117, 99, 99, 101, 115,
|
||||
115, 102, 117, 108, 108, 121,
|
||||
];
|
||||
for (let i = 0; i < encoded.length; i++) {
|
||||
expect(text[i]).toBe(encoded[i]);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("writeFileSync", () => {
|
||||
it("works", () => {
|
||||
const path = `/tmp/${Date.now()}.writeFileSync.txt`;
|
||||
writeFileSync(path, "File written successfully", "utf8");
|
||||
|
||||
expect(readFileSync(path, "utf8")).toBe("File written successfully");
|
||||
});
|
||||
|
||||
it("returning Uint8Array works", () => {
|
||||
const buffer = new Uint8Array([
|
||||
70, 105, 108, 101, 32, 119, 114, 105, 116, 116, 101, 110, 32, 115, 117,
|
||||
99, 99, 101, 115, 115, 102, 117, 108, 108, 121,
|
||||
]);
|
||||
const path = `/tmp/${Date.now()}.blob.writeFileSync.txt`;
|
||||
writeFileSync(path, buffer);
|
||||
const out = readFileSync(path);
|
||||
|
||||
for (let i = 0; i < buffer.length; i++) {
|
||||
expect(buffer[i]).toBe(out[i]);
|
||||
}
|
||||
});
|
||||
it("returning ArrayBuffer works", () => {
|
||||
const buffer = new Uint8Array([
|
||||
70, 105, 108, 101, 32, 119, 114, 105, 116, 116, 101, 110, 32, 115, 117,
|
||||
99, 99, 101, 115, 115, 102, 117, 108, 108, 121,
|
||||
]);
|
||||
const path = `/tmp/${Date.now()}.blob2.writeFileSync.txt`;
|
||||
writeFileSync(path, buffer.buffer);
|
||||
const out = readFileSync(path);
|
||||
|
||||
for (let i = 0; i < buffer.length; i++) {
|
||||
expect(buffer[i]).toBe(out[i]);
|
||||
}
|
||||
});
|
||||
});
|
||||
13
integration/bunjs-only-snippets/import-meta.test.js
Normal file
@@ -0,0 +1,13 @@
|
||||
import { it, expect } from "bun:test";
|
||||
|
||||
const { path, dir } = import.meta;
|
||||
|
||||
it("import.meta.dir", () => {
|
||||
expect(dir.endsWith("/bun/integration/bunjs-only-snippets")).toBe(true);
|
||||
});
|
||||
|
||||
it("import.meta.path", () => {
|
||||
expect(
|
||||
path.endsWith("/bun/integration/bunjs-only-snippets/import-meta.test.js")
|
||||
).toBe(true);
|
||||
});
|
||||
7
integration/bunjs-only-snippets/macro-check.js
Normal file
@@ -0,0 +1,7 @@
|
||||
export function keepSecondArgument(args) {
|
||||
return args.arguments[1];
|
||||
}
|
||||
|
||||
export function bacon(args) {
|
||||
return args.arguments[1];
|
||||
}
|
||||
76
integration/bunjs-only-snippets/microtask.js
Normal file
@@ -0,0 +1,76 @@
|
||||
// You can verify this test is correct by copy pasting this into a browser's console and checking it doesn't throw an error.
|
||||
var run = 0;
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 0) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 3) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
});
|
||||
});
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 1) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 4) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 6) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 2) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 5) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 7) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
{
|
||||
var passed = false;
|
||||
try {
|
||||
queueMicrotask(1234);
|
||||
} catch (exception) {
|
||||
passed = exception instanceof TypeError;
|
||||
}
|
||||
|
||||
if (!passed)
|
||||
throw new Error(
|
||||
"queueMicrotask should throw a TypeError if the argument is not a function"
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
var passed = false;
|
||||
try {
|
||||
queueMicrotask();
|
||||
} catch (exception) {
|
||||
passed = exception instanceof TypeError;
|
||||
}
|
||||
|
||||
if (!passed)
|
||||
throw new Error(
|
||||
"queueMicrotask should throw a TypeError if the argument is empty"
|
||||
);
|
||||
}
|
||||
457
integration/bunjs-only-snippets/path.test.js
Normal file
@@ -0,0 +1,457 @@
|
||||
const { file } = import.meta;
|
||||
|
||||
import { describe, it, expect } from "bun:test";
|
||||
import * as path from "node:path";
|
||||
import assert from "assert";
|
||||
|
||||
const strictEqual = (...args) => {
|
||||
assert.strictEqual(...args);
|
||||
expect(true).toBe(true);
|
||||
};
|
||||
|
||||
it("path.basename", () => {
|
||||
strictEqual(path.basename(file), "path.test.js");
|
||||
strictEqual(path.basename(file, ".js"), "path.test");
|
||||
strictEqual(path.basename(".js", ".js"), "");
|
||||
strictEqual(path.basename(""), "");
|
||||
strictEqual(path.basename("/dir/basename.ext"), "basename.ext");
|
||||
strictEqual(path.basename("/basename.ext"), "basename.ext");
|
||||
strictEqual(path.basename("basename.ext"), "basename.ext");
|
||||
strictEqual(path.basename("basename.ext/"), "basename.ext");
|
||||
strictEqual(path.basename("basename.ext//"), "basename.ext");
|
||||
strictEqual(path.basename("aaa/bbb", "/bbb"), "bbb");
|
||||
strictEqual(path.basename("aaa/bbb", "a/bbb"), "bbb");
|
||||
strictEqual(path.basename("aaa/bbb", "bbb"), "bbb");
|
||||
strictEqual(path.basename("aaa/bbb//", "bbb"), "bbb");
|
||||
strictEqual(path.basename("aaa/bbb", "bb"), "b");
|
||||
strictEqual(path.basename("aaa/bbb", "b"), "bb");
|
||||
strictEqual(path.basename("/aaa/bbb", "/bbb"), "bbb");
|
||||
strictEqual(path.basename("/aaa/bbb", "a/bbb"), "bbb");
|
||||
strictEqual(path.basename("/aaa/bbb", "bbb"), "bbb");
|
||||
strictEqual(path.basename("/aaa/bbb//", "bbb"), "bbb");
|
||||
strictEqual(path.basename("/aaa/bbb", "bb"), "b");
|
||||
strictEqual(path.basename("/aaa/bbb", "b"), "bb");
|
||||
strictEqual(path.basename("/aaa/bbb"), "bbb");
|
||||
strictEqual(path.basename("/aaa/"), "aaa");
|
||||
strictEqual(path.basename("/aaa/b"), "b");
|
||||
strictEqual(path.basename("/a/b"), "b");
|
||||
strictEqual(path.basename("//a"), "a");
|
||||
strictEqual(path.basename("a", "a"), "");
|
||||
|
||||
// // On Windows a backslash acts as a path separator.
|
||||
strictEqual(path.win32.basename("\\dir\\basename.ext"), "basename.ext");
|
||||
strictEqual(path.win32.basename("\\basename.ext"), "basename.ext");
|
||||
strictEqual(path.win32.basename("basename.ext"), "basename.ext");
|
||||
strictEqual(path.win32.basename("basename.ext\\"), "basename.ext");
|
||||
strictEqual(path.win32.basename("basename.ext\\\\"), "basename.ext");
|
||||
strictEqual(path.win32.basename("foo"), "foo");
|
||||
strictEqual(path.win32.basename("aaa\\bbb", "\\bbb"), "bbb");
|
||||
strictEqual(path.win32.basename("aaa\\bbb", "a\\bbb"), "bbb");
|
||||
strictEqual(path.win32.basename("aaa\\bbb", "bbb"), "bbb");
|
||||
strictEqual(path.win32.basename("aaa\\bbb\\\\\\\\", "bbb"), "bbb");
|
||||
strictEqual(path.win32.basename("aaa\\bbb", "bb"), "b");
|
||||
strictEqual(path.win32.basename("aaa\\bbb", "b"), "bb");
|
||||
strictEqual(path.win32.basename("C:"), "");
|
||||
strictEqual(path.win32.basename("C:."), ".");
|
||||
strictEqual(path.win32.basename("C:\\"), "");
|
||||
strictEqual(path.win32.basename("C:\\dir\\base.ext"), "base.ext");
|
||||
strictEqual(path.win32.basename("C:\\basename.ext"), "basename.ext");
|
||||
strictEqual(path.win32.basename("C:basename.ext"), "basename.ext");
|
||||
strictEqual(path.win32.basename("C:basename.ext\\"), "basename.ext");
|
||||
strictEqual(path.win32.basename("C:basename.ext\\\\"), "basename.ext");
|
||||
strictEqual(path.win32.basename("C:foo"), "foo");
|
||||
strictEqual(path.win32.basename("file:stream"), "file:stream");
|
||||
strictEqual(path.win32.basename("a", "a"), "");
|
||||
|
||||
// On unix a backslash is just treated as any other character.
|
||||
strictEqual(
|
||||
path.posix.basename("\\dir\\basename.ext"),
|
||||
"\\dir\\basename.ext"
|
||||
);
|
||||
strictEqual(path.posix.basename("\\basename.ext"), "\\basename.ext");
|
||||
strictEqual(path.posix.basename("basename.ext"), "basename.ext");
|
||||
strictEqual(path.posix.basename("basename.ext\\"), "basename.ext\\");
|
||||
strictEqual(path.posix.basename("basename.ext\\\\"), "basename.ext\\\\");
|
||||
strictEqual(path.posix.basename("foo"), "foo");
|
||||
|
||||
// POSIX filenames may include control characters
|
||||
// c.f. http://www.dwheeler.com/essays/fixing-unix-linux-filenames.html
|
||||
const controlCharFilename = `Icon${String.fromCharCode(13)}`;
|
||||
strictEqual(
|
||||
path.posix.basename(`/a/b/${controlCharFilename}`),
|
||||
controlCharFilename
|
||||
);
|
||||
});
|
||||
|
||||
it("path.join", () => {
|
||||
const failures = [];
|
||||
const backslashRE = /\\/g;
|
||||
|
||||
const joinTests = [
|
||||
[
|
||||
[path.posix.join],
|
||||
// Arguments result
|
||||
[
|
||||
[[".", "x/b", "..", "/b/c.js"], "x/b/c.js"],
|
||||
// [[], '.'],
|
||||
[["/.", "x/b", "..", "/b/c.js"], "/x/b/c.js"],
|
||||
[["/foo", "../../../bar"], "/bar"],
|
||||
[["foo", "../../../bar"], "../../bar"],
|
||||
[["foo/", "../../../bar"], "../../bar"],
|
||||
[["foo/x", "../../../bar"], "../bar"],
|
||||
[["foo/x", "./bar"], "foo/x/bar"],
|
||||
[["foo/x/", "./bar"], "foo/x/bar"],
|
||||
[["foo/x/", ".", "bar"], "foo/x/bar"],
|
||||
[["./"], "./"],
|
||||
[[".", "./"], "./"],
|
||||
[[".", ".", "."], "."],
|
||||
[[".", "./", "."], "."],
|
||||
[[".", "/./", "."], "."],
|
||||
[[".", "/////./", "."], "."],
|
||||
[["."], "."],
|
||||
[["", "."], "."],
|
||||
[["", "foo"], "foo"],
|
||||
[["foo", "/bar"], "foo/bar"],
|
||||
[["", "/foo"], "/foo"],
|
||||
[["", "", "/foo"], "/foo"],
|
||||
[["", "", "foo"], "foo"],
|
||||
[["foo", ""], "foo"],
|
||||
[["foo/", ""], "foo/"],
|
||||
[["foo", "", "/bar"], "foo/bar"],
|
||||
[["./", "..", "/foo"], "../foo"],
|
||||
[["./", "..", "..", "/foo"], "../../foo"],
|
||||
[[".", "..", "..", "/foo"], "../../foo"],
|
||||
[["", "..", "..", "/foo"], "../../foo"],
|
||||
[["/"], "/"],
|
||||
[["/", "."], "/"],
|
||||
[["/", ".."], "/"],
|
||||
[["/", "..", ".."], "/"],
|
||||
[[""], "."],
|
||||
[["", ""], "."],
|
||||
[[" /foo"], " /foo"],
|
||||
[[" ", "foo"], " /foo"],
|
||||
[[" ", "."], " "],
|
||||
[[" ", "/"], " /"],
|
||||
[[" ", ""], " "],
|
||||
[["/", "foo"], "/foo"],
|
||||
[["/", "/foo"], "/foo"],
|
||||
[["/", "//foo"], "/foo"],
|
||||
[["/", "", "/foo"], "/foo"],
|
||||
[["", "/", "foo"], "/foo"],
|
||||
[["", "/", "/foo"], "/foo"],
|
||||
],
|
||||
],
|
||||
];
|
||||
|
||||
// // Windows-specific join tests
|
||||
// joinTests.push([
|
||||
// path.win32.join,
|
||||
// joinTests[0][1].slice(0).concat([
|
||||
// // Arguments result
|
||||
// // UNC path expected
|
||||
// [["//foo/bar"], "\\\\foo\\bar\\"],
|
||||
// [["\\/foo/bar"], "\\\\foo\\bar\\"],
|
||||
// [["\\\\foo/bar"], "\\\\foo\\bar\\"],
|
||||
// // UNC path expected - server and share separate
|
||||
// [["//foo", "bar"], "\\\\foo\\bar\\"],
|
||||
// [["//foo/", "bar"], "\\\\foo\\bar\\"],
|
||||
// [["//foo", "/bar"], "\\\\foo\\bar\\"],
|
||||
// // UNC path expected - questionable
|
||||
// [["//foo", "", "bar"], "\\\\foo\\bar\\"],
|
||||
// [["//foo/", "", "bar"], "\\\\foo\\bar\\"],
|
||||
// [["//foo/", "", "/bar"], "\\\\foo\\bar\\"],
|
||||
// // UNC path expected - even more questionable
|
||||
// [["", "//foo", "bar"], "\\\\foo\\bar\\"],
|
||||
// [["", "//foo/", "bar"], "\\\\foo\\bar\\"],
|
||||
// [["", "//foo/", "/bar"], "\\\\foo\\bar\\"],
|
||||
// // No UNC path expected (no double slash in first component)
|
||||
// [["\\", "foo/bar"], "\\foo\\bar"],
|
||||
// [["\\", "/foo/bar"], "\\foo\\bar"],
|
||||
// [["", "/", "/foo/bar"], "\\foo\\bar"],
|
||||
// // No UNC path expected (no non-slashes in first component -
|
||||
// // questionable)
|
||||
// [["//", "foo/bar"], "\\foo\\bar"],
|
||||
// [["//", "/foo/bar"], "\\foo\\bar"],
|
||||
// [["\\\\", "/", "/foo/bar"], "\\foo\\bar"],
|
||||
// [["//"], "\\"],
|
||||
// // No UNC path expected (share name missing - questionable).
|
||||
// [["//foo"], "\\foo"],
|
||||
// [["//foo/"], "\\foo\\"],
|
||||
// [["//foo", "/"], "\\foo\\"],
|
||||
// [["//foo", "", "/"], "\\foo\\"],
|
||||
// // No UNC path expected (too many leading slashes - questionable)
|
||||
// [["///foo/bar"], "\\foo\\bar"],
|
||||
// [["////foo", "bar"], "\\foo\\bar"],
|
||||
// [["\\\\\\/foo/bar"], "\\foo\\bar"],
|
||||
// // Drive-relative vs drive-absolute paths. This merely describes the
|
||||
// // status quo, rather than being obviously right
|
||||
// [["c:"], "c:."],
|
||||
// [["c:."], "c:."],
|
||||
// [["c:", ""], "c:."],
|
||||
// [["", "c:"], "c:."],
|
||||
// [["c:.", "/"], "c:.\\"],
|
||||
// [["c:.", "file"], "c:file"],
|
||||
// [["c:", "/"], "c:\\"],
|
||||
// [["c:", "file"], "c:\\file"],
|
||||
// ]),
|
||||
// ]);
|
||||
joinTests.forEach((test) => {
|
||||
if (!Array.isArray(test[0])) test[0] = [test[0]];
|
||||
test[0].forEach((join) => {
|
||||
test[1].forEach((test) => {
|
||||
const actual = join.apply(null, test[0]);
|
||||
const expected = test[1];
|
||||
// For non-Windows specific tests with the Windows join(), we need to try
|
||||
// replacing the slashes since the non-Windows specific tests' `expected`
|
||||
// use forward slashes
|
||||
let actualAlt;
|
||||
let os;
|
||||
if (join === path.win32.join) {
|
||||
actualAlt = actual.replace(backslashRE, "/");
|
||||
os = "win32";
|
||||
} else {
|
||||
os = "posix";
|
||||
}
|
||||
if (actual !== expected && actualAlt !== expected) {
|
||||
const delimiter = test[0].map(JSON.stringify).join(",");
|
||||
const message = `path.${os}.join(${delimiter})\n expect=${JSON.stringify(
|
||||
expected
|
||||
)}\n actual=${JSON.stringify(actual)}`;
|
||||
failures.push(`\n${message}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
strictEqual(failures.length, 0, failures.join(""));
|
||||
});
|
||||
|
||||
it("path.relative", () => {
|
||||
const failures = [];
|
||||
|
||||
const relativeTests = [
|
||||
// [
|
||||
// path.win32.relative,
|
||||
// // Arguments result
|
||||
// [
|
||||
// ["c:/blah\\blah", "d:/games", "d:\\games"],
|
||||
// ["c:/aaaa/bbbb", "c:/aaaa", ".."],
|
||||
// ["c:/aaaa/bbbb", "c:/cccc", "..\\..\\cccc"],
|
||||
// ["c:/aaaa/bbbb", "c:/aaaa/bbbb", ""],
|
||||
// ["c:/aaaa/bbbb", "c:/aaaa/cccc", "..\\cccc"],
|
||||
// ["c:/aaaa/", "c:/aaaa/cccc", "cccc"],
|
||||
// ["c:/", "c:\\aaaa\\bbbb", "aaaa\\bbbb"],
|
||||
// ["c:/aaaa/bbbb", "d:\\", "d:\\"],
|
||||
// ["c:/AaAa/bbbb", "c:/aaaa/bbbb", ""],
|
||||
// ["c:/aaaaa/", "c:/aaaa/cccc", "..\\aaaa\\cccc"],
|
||||
// ["C:\\foo\\bar\\baz\\quux", "C:\\", "..\\..\\..\\.."],
|
||||
// [
|
||||
// "C:\\foo\\test",
|
||||
// "C:\\foo\\test\\bar\\package.json",
|
||||
// "bar\\package.json",
|
||||
// ],
|
||||
// ["C:\\foo\\bar\\baz-quux", "C:\\foo\\bar\\baz", "..\\baz"],
|
||||
// ["C:\\foo\\bar\\baz", "C:\\foo\\bar\\baz-quux", "..\\baz-quux"],
|
||||
// ["\\\\foo\\bar", "\\\\foo\\bar\\baz", "baz"],
|
||||
// ["\\\\foo\\bar\\baz", "\\\\foo\\bar", ".."],
|
||||
// ["\\\\foo\\bar\\baz-quux", "\\\\foo\\bar\\baz", "..\\baz"],
|
||||
// ["\\\\foo\\bar\\baz", "\\\\foo\\bar\\baz-quux", "..\\baz-quux"],
|
||||
// ["C:\\baz-quux", "C:\\baz", "..\\baz"],
|
||||
// ["C:\\baz", "C:\\baz-quux", "..\\baz-quux"],
|
||||
// ["\\\\foo\\baz-quux", "\\\\foo\\baz", "..\\baz"],
|
||||
// ["\\\\foo\\baz", "\\\\foo\\baz-quux", "..\\baz-quux"],
|
||||
// ["C:\\baz", "\\\\foo\\bar\\baz", "\\\\foo\\bar\\baz"],
|
||||
// ["\\\\foo\\bar\\baz", "C:\\baz", "C:\\baz"],
|
||||
// ],
|
||||
// ],
|
||||
[
|
||||
path.posix.relative,
|
||||
// Arguments result
|
||||
[
|
||||
["/var/lib", "/var", ".."],
|
||||
["/var/lib", "/bin", "../../bin"],
|
||||
["/var/lib", "/var/lib", ""],
|
||||
["/var/lib", "/var/apache", "../apache"],
|
||||
["/var/", "/var/lib", "lib"],
|
||||
["/", "/var/lib", "var/lib"],
|
||||
["/foo/test", "/foo/test/bar/package.json", "bar/package.json"],
|
||||
["/Users/a/web/b/test/mails", "/Users/a/web/b", "../.."],
|
||||
["/foo/bar/baz-quux", "/foo/bar/baz", "../baz"],
|
||||
["/foo/bar/baz", "/foo/bar/baz-quux", "../baz-quux"],
|
||||
["/baz-quux", "/baz", "../baz"],
|
||||
["/baz", "/baz-quux", "../baz-quux"],
|
||||
["/page1/page2/foo", "/", "../../.."],
|
||||
],
|
||||
],
|
||||
];
|
||||
|
||||
relativeTests.forEach((test) => {
|
||||
const relative = test[0];
|
||||
test[1].forEach((test) => {
|
||||
const actual = relative(test[0], test[1]);
|
||||
const expected = test[2];
|
||||
if (actual !== expected) {
|
||||
const os = relative === path.win32.relative ? "win32" : "posix";
|
||||
const message = `path.${os}.relative(${test
|
||||
.slice(0, 2)
|
||||
.map(JSON.stringify)
|
||||
.join(",")})\n expect=${JSON.stringify(
|
||||
expected
|
||||
)}\n actual=${JSON.stringify(actual)}`;
|
||||
failures.push(`\n${message}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
strictEqual(failures.length, 0, failures.join(""));
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
it("path.normalize", () => {
|
||||
// strictEqual(
|
||||
// path.win32.normalize("./fixtures///b/../b/c.js"),
|
||||
// "fixtures\\b\\c.js"
|
||||
// );
|
||||
// strictEqual(path.win32.normalize("/foo/../../../bar"), "\\bar");
|
||||
// strictEqual(path.win32.normalize("a//b//../b"), "a\\b");
|
||||
// strictEqual(path.win32.normalize("a//b//./c"), "a\\b\\c");
|
||||
// strictEqual(path.win32.normalize("a//b//."), "a\\b");
|
||||
// strictEqual(
|
||||
// path.win32.normalize("//server/share/dir/file.ext"),
|
||||
// "\\\\server\\share\\dir\\file.ext"
|
||||
// );
|
||||
// strictEqual(path.win32.normalize("/a/b/c/../../../x/y/z"), "\\x\\y\\z");
|
||||
// strictEqual(path.win32.normalize("C:"), "C:.");
|
||||
// strictEqual(path.win32.normalize("C:..\\abc"), "C:..\\abc");
|
||||
// strictEqual(path.win32.normalize("C:..\\..\\abc\\..\\def"), "C:..\\..\\def");
|
||||
// strictEqual(path.win32.normalize("C:\\."), "C:\\");
|
||||
// strictEqual(path.win32.normalize("file:stream"), "file:stream");
|
||||
// strictEqual(path.win32.normalize("bar\\foo..\\..\\"), "bar\\");
|
||||
// strictEqual(path.win32.normalize("bar\\foo..\\.."), "bar");
|
||||
// strictEqual(path.win32.normalize("bar\\foo..\\..\\baz"), "bar\\baz");
|
||||
// strictEqual(path.win32.normalize("bar\\foo..\\"), "bar\\foo..\\");
|
||||
// strictEqual(path.win32.normalize("bar\\foo.."), "bar\\foo..");
|
||||
// strictEqual(path.win32.normalize("..\\foo..\\..\\..\\bar"), "..\\..\\bar");
|
||||
// strictEqual(
|
||||
// path.win32.normalize("..\\...\\..\\.\\...\\..\\..\\bar"),
|
||||
// "..\\..\\bar"
|
||||
// );
|
||||
// strictEqual(
|
||||
// path.win32.normalize("../../../foo/../../../bar"),
|
||||
// "..\\..\\..\\..\\..\\bar"
|
||||
// );
|
||||
// strictEqual(
|
||||
// path.win32.normalize("../../../foo/../../../bar/../../"),
|
||||
// "..\\..\\..\\..\\..\\..\\"
|
||||
// );
|
||||
// strictEqual(
|
||||
// path.win32.normalize("../foobar/barfoo/foo/../../../bar/../../"),
|
||||
// "..\\..\\"
|
||||
// );
|
||||
// strictEqual(
|
||||
// path.win32.normalize("../.../../foobar/../../../bar/../../baz"),
|
||||
// "..\\..\\..\\..\\baz"
|
||||
// );
|
||||
// strictEqual(path.win32.normalize("foo/bar\\baz"), "foo\\bar\\baz");
|
||||
|
||||
strictEqual(
|
||||
path.posix.normalize("./fixtures///b/../b/c.js"),
|
||||
"fixtures/b/c.js"
|
||||
);
|
||||
strictEqual(path.posix.normalize("/foo/../../../bar"), "/bar");
|
||||
strictEqual(path.posix.normalize("a//b//../b"), "a/b");
|
||||
strictEqual(path.posix.normalize("a//b//./c"), "a/b/c");
|
||||
strictEqual(path.posix.normalize("a//b//."), "a/b");
|
||||
strictEqual(path.posix.normalize("/a/b/c/../../../x/y/z"), "/x/y/z");
|
||||
strictEqual(path.posix.normalize("///..//./foo/.//bar"), "/foo/bar");
|
||||
strictEqual(path.posix.normalize("bar/foo../../"), "bar/");
|
||||
strictEqual(path.posix.normalize("bar/foo../.."), "bar");
|
||||
strictEqual(path.posix.normalize("bar/foo../../baz"), "bar/baz");
|
||||
strictEqual(path.posix.normalize("bar/foo../"), "bar/foo../");
|
||||
strictEqual(path.posix.normalize("bar/foo.."), "bar/foo..");
|
||||
console.log("A");
|
||||
strictEqual(path.posix.normalize("../foo../../../bar"), "../../bar");
|
||||
console.log("B");
|
||||
strictEqual(path.posix.normalize("../.../.././.../../../bar"), "../../bar");
|
||||
strictEqual(
|
||||
path.posix.normalize("../../../foo/../../../bar"),
|
||||
"../../../../../bar"
|
||||
);
|
||||
strictEqual(
|
||||
path.posix.normalize("../../../foo/../../../bar/../../"),
|
||||
"../../../../../../"
|
||||
);
|
||||
strictEqual(
|
||||
path.posix.normalize("../foobar/barfoo/foo/../../../bar/../../"),
|
||||
"../../"
|
||||
);
|
||||
strictEqual(
|
||||
path.posix.normalize("../.../../foobar/../../../bar/../../baz"),
|
||||
"../../../../baz"
|
||||
);
|
||||
strictEqual(path.posix.normalize("foo/bar\\baz"), "foo/bar\\baz");
|
||||
});
|
||||
|
||||
it("path.resolve", () => {
|
||||
const failures = [];
|
||||
const slashRE = /\//g;
|
||||
const backslashRE = /\\/g;
|
||||
|
||||
const resolveTests = [
|
||||
// [
|
||||
// path.win32.resolve,
|
||||
// // Arguments result
|
||||
// [
|
||||
// [["c:/blah\\blah", "d:/games", "c:../a"], "c:\\blah\\a"],
|
||||
// [["c:/ignore", "d:\\a/b\\c/d", "\\e.exe"], "d:\\e.exe"],
|
||||
// [["c:/ignore", "c:/some/file"], "c:\\some\\file"],
|
||||
// [["d:/ignore", "d:some/dir//"], "d:\\ignore\\some\\dir"],
|
||||
// [["."], process.cwd()],
|
||||
// [["//server/share", "..", "relative\\"], "\\\\server\\share\\relative"],
|
||||
// [["c:/", "//"], "c:\\"],
|
||||
// [["c:/", "//dir"], "c:\\dir"],
|
||||
// [["c:/", "//server/share"], "\\\\server\\share\\"],
|
||||
// [["c:/", "//server//share"], "\\\\server\\share\\"],
|
||||
// [["c:/", "///some//dir"], "c:\\some\\dir"],
|
||||
// [
|
||||
// ["C:\\foo\\tmp.3\\", "..\\tmp.3\\cycles\\root.js"],
|
||||
// "C:\\foo\\tmp.3\\cycles\\root.js",
|
||||
// ],
|
||||
// ],
|
||||
// ],
|
||||
[
|
||||
path.posix.resolve,
|
||||
// Arguments result
|
||||
[
|
||||
[["/var/lib", "../", "file/"], "/var/file"],
|
||||
[["/var/lib", "/../", "file/"], "/file"],
|
||||
[["a/b/c/", "../../.."], process.cwd()],
|
||||
[["."], process.cwd()],
|
||||
[["/some/dir", ".", "/absolute/"], "/absolute"],
|
||||
[
|
||||
["/foo/tmp.3/", "../tmp.3/cycles/root.js"],
|
||||
"/foo/tmp.3/cycles/root.js",
|
||||
],
|
||||
],
|
||||
],
|
||||
];
|
||||
const isWindows = false;
|
||||
resolveTests.forEach(([resolve, tests]) => {
|
||||
tests.forEach(([test, expected]) => {
|
||||
const actual = resolve.apply(null, test);
|
||||
let actualAlt;
|
||||
const os = resolve === path.win32.resolve ? "win32" : "posix";
|
||||
if (resolve === path.win32.resolve && !isWindows)
|
||||
actualAlt = actual.replace(backslashRE, "/");
|
||||
else if (resolve !== path.win32.resolve && isWindows)
|
||||
actualAlt = actual.replace(slashRE, "\\");
|
||||
|
||||
const message = `path.${os}.resolve(${test
|
||||
.map(JSON.stringify)
|
||||
.join(",")})\n expect=${JSON.stringify(
|
||||
expected
|
||||
)}\n actual=${JSON.stringify(actual)}`;
|
||||
if (actual !== expected && actualAlt !== expected) failures.push(message);
|
||||
});
|
||||
});
|
||||
strictEqual(failures.length, 0, failures.join("\n"));
|
||||
});
|
||||
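A minimal standalone sketch of the posix cases the suite above exercises, assuming a Node-compatible "path" and "assert" (this snippet is illustrative and not part of the test files):

const path = require("path");
const { strictEqual } = require("assert");

// Expected values taken directly from the assertions above.
strictEqual(path.posix.normalize("a//b//../b"), "a/b");
strictEqual(path.posix.resolve("/var/lib", "../", "file/"), "/var/file");
strictEqual(path.posix.resolve("/some/dir", ".", "/absolute/"), "/absolute");
console.log("path sanity checks passed");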
91
integration/bunjs-only-snippets/process-nexttick.js
Normal file
@@ -0,0 +1,91 @@
|
||||
// You can verify this test is correct by copy pasting this into a browser's console and checking it doesn't throw an error.
|
||||
var run = 0;
|
||||
|
||||
var queueMicrotask = process.nextTick;
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 0) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 3) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
});
|
||||
});
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 1) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 4) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 6) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 2) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 5) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
|
||||
queueMicrotask(() => {
|
||||
if (run++ != 7) {
|
||||
reject(new Error("Microtask execution order is wrong: " + run));
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
{
|
||||
var passed = false;
|
||||
try {
|
||||
queueMicrotask(1234);
|
||||
} catch (exception) {
|
||||
passed = exception instanceof TypeError;
|
||||
}
|
||||
|
||||
if (!passed)
|
||||
throw new Error(
|
||||
"queueMicrotask should throw a TypeError if the argument is not a function"
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
var passed = false;
|
||||
try {
|
||||
queueMicrotask();
|
||||
} catch (exception) {
|
||||
passed = exception instanceof TypeError;
|
||||
}
|
||||
|
||||
if (!passed)
|
||||
throw new Error(
|
||||
"queueMicrotask should throw a TypeError if the argument is empty"
|
||||
);
|
||||
}
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
process.nextTick(
|
||||
(first, second) => {
|
||||
console.log(first, second);
|
||||
if (first !== 12345 || second !== "hello")
|
||||
reject(new Error("process.nextTick called with wrong arguments"));
|
||||
resolve(true);
|
||||
},
|
||||
12345,
|
||||
"hello"
|
||||
);
|
||||
});
|
||||
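As the final block above relies on, process.nextTick forwards any extra arguments to its callback; a two-line sketch of that contract (standalone, not part of the snippet):

process.nextTick((first, second) => {
  console.log(first, second); // 12345 "hello"
}, 12345, "hello");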
48
integration/bunjs-only-snippets/process.js
Normal file
@@ -0,0 +1,48 @@
|
||||
// this property isn't implemented yet but it should at least return a string
|
||||
const isNode = !process.isBun;
|
||||
|
||||
if (!isNode && process.title !== "bun")
|
||||
throw new Error("process.title is not 'bun'");
|
||||
|
||||
if (typeof process.env.USER !== "string")
|
||||
throw new Error("process.env is not an object");
|
||||
|
||||
if (process.env.USER.length === 0)
|
||||
throw new Error("process.env is missing a USER property");
|
||||
|
||||
if (process.platform !== "darwin" && process.platform !== "linux")
|
||||
throw new Error("process.platform is invalid");
|
||||
|
||||
if (isNode) throw new Error("process.isBun is invalid");
|
||||
|
||||
// partially to test it doesn't crash due to various strange types
|
||||
process.env.BACON = "yummy";
|
||||
if (process.env.BACON !== "yummy") {
|
||||
throw new Error("process.env is not writable");
|
||||
}
|
||||
|
||||
delete process.env.BACON;
|
||||
if (typeof process.env.BACON !== "undefined") {
|
||||
throw new Error("process.env is not deletable");
|
||||
}
|
||||
|
||||
process.env.BACON = "yummy";
|
||||
if (process.env.BACON !== "yummy") {
|
||||
throw new Error("process.env is not re-writable");
|
||||
}
|
||||
|
||||
if (JSON.parse(JSON.stringify(process.env)).BACON !== "yummy") {
|
||||
throw new Error("process.env is not serializable");
|
||||
}
|
||||
|
||||
if (typeof JSON.parse(JSON.stringify(process.env)).toJSON !== "undefined") {
|
||||
throw new Error("process.env should call toJSON to hide its internal state");
|
||||
}
|
||||
|
||||
var { env, ...proces } = process;
|
||||
console.log(JSON.stringify(proces, null, 2));
|
||||
console.log(proces);
|
||||
|
||||
console.log("CWD", process.cwd());
|
||||
console.log("SET CWD", process.chdir("../"));
|
||||
console.log("CWD", process.cwd());
|
||||
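A compact sketch of the env round-trip the snippet above performs: process.env should survive JSON serialization as a plain object, and the copy should not expose a toJSON property (illustrative, not part of the snippet):

process.env.BACON = "yummy";
const copy = JSON.parse(JSON.stringify(process.env));
console.log(copy.BACON);         // "yummy"
console.log(typeof copy.toJSON); // "undefined"
delete process.env.BACON;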
1
integration/bunjs-only-snippets/readFileSync.txt
Normal file
@@ -0,0 +1 @@
|
||||
File read successfully
|
||||
9
integration/bunjs-only-snippets/readdir.js
Normal file
@@ -0,0 +1,9 @@
|
||||
const { readdirSync } = require("fs");
|
||||
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
readdirSync(".");
|
||||
}
|
||||
|
||||
console.log(readdirSync("."));
|
||||
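A hedged sketch of how the ITERATIONS knob above can be used for rough timing; the timing wrapper is illustrative and not part of the snippet:

const { readdirSync } = require("fs");

const iterations = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const start = Date.now();
for (let i = 0; i < iterations; i++) {
  readdirSync(".");
}
console.log("readdirSync x" + iterations + ": " + (Date.now() - start) + "ms");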
@@ -1,7 +1,7 @@
|
||||
const interval = 0.5;
|
||||
const interval = 0.01;
|
||||
const now = performance.now();
|
||||
console.time("Slept");
|
||||
Bun.sleep(interval);
|
||||
Bun.sleepSync(interval);
|
||||
const elapsed = performance.now() - now;
|
||||
if (elapsed < interval) {
|
||||
throw new Error("Didn't sleep");
|
||||
|
||||
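The hunk above (shown without +/- markers) replaces Bun.sleep with Bun.sleepSync and shortens the interval from 0.5 to 0.01; after the change the snippet presumably reads roughly as:

const interval = 0.01;
const now = performance.now();
console.time("Slept");
Bun.sleepSync(interval);
const elapsed = performance.now() - now;
if (elapsed < interval) {
  throw new Error("Didn't sleep");
}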
51
integration/bunjs-only-snippets/some-fs.js
Normal file
@@ -0,0 +1,51 @@
|
||||
const { mkdirSync, existsSync } = require("fs");
|
||||
|
||||
var performance = globalThis.performance;
|
||||
if (!performance) {
|
||||
try {
|
||||
performance = require("perf_hooks").performance;
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
var tempdir = `/tmp/some-fs-test/dir/${Date.now()}/hi`;
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
tempdir += `/${i.toString(36)}`;
|
||||
}
|
||||
|
||||
if (existsSync(tempdir)) {
|
||||
throw new Error(
|
||||
`existsSync reports ${tempdir} exists, but it probably does not`
|
||||
);
|
||||
}
|
||||
|
||||
var origTempDir = tempdir;
|
||||
var iterations = new Array(count * count).fill("");
|
||||
var total = 0;
|
||||
for (let i = 0; i < count; i++) {
|
||||
for (let j = 0; j < count; j++) {
|
||||
iterations[total++] = `${origTempDir}/${j.toString(36)}-${i.toString(36)}`;
|
||||
}
|
||||
}
|
||||
tempdir = origTempDir;
|
||||
mkdirSync(origTempDir, { recursive: true });
|
||||
const recurse = { recursive: false };
|
||||
const start = performance.now();
|
||||
for (let i = 0; i < total; i++) {
|
||||
mkdirSync(iterations[i], recurse);
|
||||
}
|
||||
|
||||
console.log("MKDIR " + total + " depth took:", performance.now() - start, "ms");
|
||||
|
||||
if (!existsSync(tempdir)) {
|
||||
throw new Error(
|
||||
"Expected directory to exist after mkdirSync, but it doesn't"
|
||||
);
|
||||
}
|
||||
|
||||
if (mkdirSync(tempdir, { recursive: true })) {
|
||||
throw new Error(
|
||||
"mkdirSync shouldn't return directory name on existing directories"
|
||||
);
|
||||
}
|
||||
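The final assertion above leans on Node-compatible behavior of mkdirSync with { recursive: true }: it returns the first directory it actually created, and returns undefined when every directory already existed. A small sketch (the path is illustrative):

const { mkdirSync } = require("fs");

const first = mkdirSync("/tmp/mkdir-demo/a/b", { recursive: true });
const again = mkdirSync("/tmp/mkdir-demo/a/b", { recursive: true });
console.log(first); // e.g. "/tmp/mkdir-demo" – first path created
console.log(again); // undefined – nothing needed to be created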
30
integration/bunjs-only-snippets/toml-fixture.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
|
||||
framework = "next"
|
||||
origin = "http://localhost:5000"
|
||||
inline.array = [1234, 4, 5, 6]
|
||||
|
||||
|
||||
[macros]
|
||||
react-relay = { "graphql" = "node_modules/bun-macro-relay/bun-macro-relay.tsx" }
|
||||
|
||||
[bundle.packages]
|
||||
"@emotion/react" = true
|
||||
|
||||
|
||||
[dev]
|
||||
foo = 123
|
||||
"foo.bar" = "baz"
|
||||
"abba.baba" = "baba"
|
||||
dabba = -123
|
||||
doo = 123.456
|
||||
one.two.three = 4
|
||||
|
||||
[[array]]
|
||||
entry_one = "one"
|
||||
entry_two = "two"
|
||||
|
||||
[[array]]
|
||||
entry_one = "three"
|
||||
|
||||
[[array.nested]]
|
||||
entry_one = "four"
|
||||
17
integration/bunjs-only-snippets/toml.test.js
Normal file
@@ -0,0 +1,17 @@
|
||||
import { describe, it, expect } from "bun:test";
|
||||
|
||||
it("syntax", async () => {
|
||||
const toml = (await import("./toml-fixture.toml")).default;
|
||||
expect(toml.framework).toBe("next");
|
||||
expect(toml.bundle.packages["@emotion/react"]).toBe(true);
|
||||
expect(toml.array[0].entry_one).toBe("one");
|
||||
expect(toml.array[0].entry_two).toBe("two");
|
||||
expect(toml.array[1].entry_one).toBe("three");
|
||||
expect(toml.array[1].entry_two).toBe(undefined);
|
||||
expect(toml.array[1].nested[0].entry_one).toBe("four");
|
||||
expect(toml.dev.one.two.three).toBe(4);
|
||||
expect(toml.dev.foo).toBe(123);
|
||||
expect(toml.inline.array[0]).toBe(1234);
|
||||
expect(toml.inline.array[1]).toBe(4);
|
||||
expect(toml.dev["foo.bar"]).toBe("baz");
|
||||
});
|
||||
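One detail the assertions above hinge on: dotted keys in the fixture (like one.two.three) become nested objects, while quoted keys (like "foo.bar") stay literal. A trimmed-down sketch of the same import, assuming an ESM/bun context where top-level await is available:

const toml = (await import("./toml-fixture.toml")).default;
console.log(toml.dev.one.two.three);            // 4
console.log(toml.dev["foo.bar"]);               // "baz"
console.log(toml.array[1].nested[0].entry_one); // "four"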
140
integration/bunjs-only-snippets/transpiler.test.js
Normal file
@@ -0,0 +1,140 @@
|
||||
import { expect, it, describe } from "bun:test";
|
||||
|
||||
describe("Bun.Transpiler", () => {
|
||||
const transpiler = new Bun.Transpiler({
|
||||
loader: "tsx",
|
||||
define: {
|
||||
"process.env.NODE_ENV": JSON.stringify("development"),
|
||||
},
|
||||
macro: {
|
||||
react: {
|
||||
bacon: `${import.meta.dir}/macro-check.js`,
|
||||
},
|
||||
},
|
||||
platform: "browser",
|
||||
});
|
||||
|
||||
const code = `import { useParams } from "remix";
|
||||
import type { LoaderFunction, ActionFunction } from "remix";
|
||||
|
||||
export const loader: LoaderFunction = async ({
|
||||
params
|
||||
}) => {
|
||||
console.log(params.postId);
|
||||
};
|
||||
|
||||
export const action: ActionFunction = async ({
|
||||
params
|
||||
}) => {
|
||||
console.log(params.postId);
|
||||
};
|
||||
|
||||
export default function PostRoute() {
|
||||
const params = useParams();
|
||||
console.log(params.postId);
|
||||
}
|
||||
|
||||
`;
|
||||
|
||||
describe("scanImports", () => {
|
||||
it("reports import paths, excluding types", () => {
|
||||
const imports = transpiler.scanImports(code);
|
||||
expect(imports.filter(({ path }) => path === "remix")).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("scan", () => {
|
||||
it("reports all export names", () => {
|
||||
const { imports, exports } = transpiler.scan(code);
|
||||
|
||||
expect(exports[0]).toBe("action");
|
||||
expect(exports[2]).toBe("loader");
|
||||
expect(exports[1]).toBe("default");
|
||||
|
||||
expect(exports).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("transform", () => {
|
||||
it("supports macros", async () => {
|
||||
const out = await transpiler.transform(`
|
||||
import {keepSecondArgument} from 'macro:${
|
||||
import.meta.dir
|
||||
}/macro-check.js';
|
||||
|
||||
export default keepSecondArgument("Test failed", "Test passed");
|
||||
`);
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
// ensure both the import and the macro function call are removed
|
||||
expect(out.includes("keepSecondArgument")).toBe(false);
|
||||
});
|
||||
|
||||
it("sync supports macros", () => {
|
||||
const out = transpiler.transformSync(`
|
||||
import {keepSecondArgument} from 'macro:${
|
||||
import.meta.dir
|
||||
}/macro-check.js';
|
||||
|
||||
export default keepSecondArgument("Test failed", "Test passed");
|
||||
`);
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
expect(out.includes("keepSecondArgument")).toBe(false);
|
||||
});
|
||||
|
||||
const importLines = [
|
||||
"import {createElement, bacon} from 'react';",
|
||||
"import {bacon, createElement} from 'react';",
|
||||
];
|
||||
describe("sync supports macros remap", () => {
|
||||
for (let importLine of importLines) {
|
||||
it(importLine, () => {
|
||||
const out = transpiler.transformSync(`
|
||||
${importLine}
|
||||
|
||||
export default bacon("Test failed", "Test passed");
|
||||
export function hi() { createElement("hi"); }
|
||||
`);
|
||||
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
expect(out.includes("bacon")).toBe(false);
|
||||
expect(out.includes("createElement")).toBe(true);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it("macro remap removes import statement if its the only used one", () => {
|
||||
const out = transpiler.transformSync(`
|
||||
import {bacon} from 'react';
|
||||
|
||||
export default bacon("Test failed", "Test passed");
|
||||
`);
|
||||
|
||||
expect(out.includes("Test failed")).toBe(false);
|
||||
expect(out.includes("Test passed")).toBe(true);
|
||||
|
||||
expect(out.includes("bacon")).toBe(false);
|
||||
expect(out.includes("import")).toBe(false);
|
||||
});
|
||||
|
||||
it("removes types", () => {
|
||||
expect(code.includes("ActionFunction")).toBe(true);
|
||||
expect(code.includes("LoaderFunction")).toBe(true);
|
||||
const out = transpiler.transformSync(code);
|
||||
|
||||
expect(out.includes("ActionFunction")).toBe(false);
|
||||
expect(out.includes("LoaderFunction")).toBe(false);
|
||||
const { exports } = transpiler.scan(out);
|
||||
|
||||
expect(exports[0]).toBe("action");
|
||||
expect(exports[2]).toBe("loader");
|
||||
expect(exports[1]).toBe("default");
|
||||
expect(exports).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
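A minimal standalone use of the same API surface the suite above exercises, with options trimmed to just the loader; exact output details may differ between versions:

const transpiler = new Bun.Transpiler({ loader: "tsx" });

const src = "export const n: number = 1; export default n;";
const out = transpiler.transformSync(src);
console.log(out.includes(": number")); // false – type annotations are stripped

const { exports } = transpiler.scan(src);
console.log(exports); // ["default", "n"] (order may vary)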
4
integration/macro/assert.tsx
Normal file
@@ -0,0 +1,4 @@
|
||||
// This logs the result at build time
|
||||
export function unreachable(call) {
|
||||
throw new Error(call.arguments[0].toString() || "unreachable");
|
||||
}
|
||||
8
integration/macro/fetchSync.tsx
Normal file
@@ -0,0 +1,8 @@
|
||||
export async function fetchSync(ctx) {
|
||||
const str = ctx.arguments[0].toString();
|
||||
|
||||
const response = await fetch(str);
|
||||
const text = await response.text();
|
||||
|
||||
return <string value={text} />;
|
||||
}
|
||||
5
integration/macro/hello-fetch-macro.tsx
Normal file
@@ -0,0 +1,5 @@
|
||||
import { fetchSync } from "macro:./fetchSync.tsx";
|
||||
|
||||
const synchronousFetch = fetchSync(`https://example.com`);
|
||||
|
||||
console.log(synchronousFetch);
|
||||
30
integration/macro/loadMocks.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
import { unreachable } from "macro:./assert";
|
||||
|
||||
if (process.env.NODE_ENV !== "test")
|
||||
unreachable("This module should only be imported in tests");
|
||||
|
||||
export const mockData = {
|
||||
Copilot: {
|
||||
id: "Copilot",
|
||||
name: "Copilot",
|
||||
description: "Copilot",
|
||||
icon: "https://s3.amazonaws.com/copilot-public/images/icons/Copilot.png",
|
||||
color: "#00AEEF",
|
||||
type: "service",
|
||||
tags: ["copilot"],
|
||||
categories: ["copilot"],
|
||||
links: [
|
||||
{
|
||||
id: "Copilot",
|
||||
name: "Copilot",
|
||||
url: "https://copilot.io",
|
||||
description: "Copilot",
|
||||
icon: "https://s3.amazonaws.com/copilot-public/images/icons/Copilot.png",
|
||||
color: "#00AEEF",
|
||||
type: "service",
|
||||
tags: ["copilot"],
|
||||
categories: ["copilot"],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
@@ -104,8 +104,6 @@ async function main() {
|
||||
async function runPage(key) {
|
||||
var page;
|
||||
try {
|
||||
console.log("launched");
|
||||
|
||||
page = await browser.newPage();
|
||||
if (USE_EXISTING_PROCESS) {
|
||||
await page.evaluate(`
|
||||
|
||||
@@ -1,84 +1,85 @@
|
||||
import snippets from "./snippets.json";
|
||||
const fail = true;
|
||||
// import snippets from "./snippets.json";
|
||||
|
||||
globalThis.console.assert = (condition, ...content) => {
|
||||
if (!condition) {
|
||||
throw new Error(content.join(" "));
|
||||
}
|
||||
};
|
||||
globalThis.getModuleScriptSrc = async (name) => {
|
||||
const response = await fetch(name, {
|
||||
cache: "force-cache",
|
||||
});
|
||||
// globalThis.console.assert = (condition, ...content) => {
|
||||
// if (!condition) {
|
||||
// throw new Error(content.join(" "));
|
||||
// }
|
||||
// };
|
||||
// globalThis.getModuleScriptSrc = async (name) => {
|
||||
// const response = await fetch(name, {
|
||||
// cache: "force-cache",
|
||||
// });
|
||||
|
||||
if (response.ok) {
|
||||
return await response.text();
|
||||
} else {
|
||||
throw new Error(`Failed to get module script ${name}`);
|
||||
}
|
||||
};
|
||||
// if (response.ok) {
|
||||
// return await response.text();
|
||||
// } else {
|
||||
// throw new Error(`Failed to get module script ${name}`);
|
||||
// }
|
||||
// };
|
||||
|
||||
globalThis.runTest = async (name) => {
|
||||
testSuccess = false;
|
||||
var Namespace = await import(name);
|
||||
var testFunction = Namespace.test;
|
||||
// globalThis.runTest = async (name) => {
|
||||
// testSuccess = false;
|
||||
// var Namespace = await import(name);
|
||||
// var testFunction = Namespace.test;
|
||||
|
||||
if (
|
||||
!("test" in Namespace) &&
|
||||
"default" in Namespace &&
|
||||
typeof Namespace.default === "function"
|
||||
) {
|
||||
Namespace = Namespace.default();
|
||||
testFunction = Namespace.test;
|
||||
}
|
||||
// if (
|
||||
// !("test" in Namespace) &&
|
||||
// "default" in Namespace &&
|
||||
// typeof Namespace.default === "function"
|
||||
// ) {
|
||||
// Namespace = Namespace.default();
|
||||
// testFunction = Namespace.test;
|
||||
// }
|
||||
|
||||
if (!testFunction) {
|
||||
throw new Error("No test function found in " + name);
|
||||
}
|
||||
// if (!testFunction) {
|
||||
// throw new Error("No test function found in " + name);
|
||||
// }
|
||||
|
||||
if (typeof testFunction !== "function") {
|
||||
throw new Error(
|
||||
`Expected (await import(\"${name}\"")) to have a test function.\nReceived: ${Object.keys(
|
||||
Namespace
|
||||
).join(", ")} `
|
||||
);
|
||||
}
|
||||
// if (typeof testFunction !== "function") {
|
||||
// throw new Error(
|
||||
// `Expected (await import(\"${name}\"")) to have a test function.\nReceived: ${Object.keys(
|
||||
// Namespace
|
||||
// ).join(", ")} `
|
||||
// );
|
||||
// }
|
||||
|
||||
if (globalThis.BUN_DEBUG_MODE) {
|
||||
try {
|
||||
await testFunction();
|
||||
if (!testSuccess) {
|
||||
throw new Error("Test failed");
|
||||
}
|
||||
} catch (exception) {
|
||||
console.error(exception);
|
||||
debugger;
|
||||
throw exception;
|
||||
}
|
||||
} else {
|
||||
await testFunction();
|
||||
if (!testSuccess) {
|
||||
throw new Error("Test failed");
|
||||
}
|
||||
}
|
||||
};
|
||||
// if (globalThis.BUN_DEBUG_MODE) {
|
||||
// try {
|
||||
// await testFunction();
|
||||
// if (!testSuccess) {
|
||||
// throw new Error("Test failed");
|
||||
// }
|
||||
// } catch (exception) {
|
||||
// console.error(exception);
|
||||
// debugger;
|
||||
// throw exception;
|
||||
// }
|
||||
// } else {
|
||||
// await testFunction();
|
||||
// if (!testSuccess) {
|
||||
// throw new Error("Test failed");
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
|
||||
var testSuccess = false;
|
||||
globalThis.testDone = () => {
|
||||
testSuccess = true;
|
||||
};
|
||||
// var testSuccess = false;
|
||||
// globalThis.testDone = () => {
|
||||
// testSuccess = true;
|
||||
// };
|
||||
|
||||
let fail = 0;
|
||||
for (let snippet of snippets) {
|
||||
try {
|
||||
await runTest("../snippets/" + snippet.substring(1));
|
||||
console.log("✅", snippet);
|
||||
} catch (exception) {
|
||||
console.error(`❌ ${snippet}`);
|
||||
console.error(exception);
|
||||
// let fail = 0;
|
||||
// for (let snippet of snippets) {
|
||||
// try {
|
||||
// await runTest("../snippets/" + snippet.substring(1));
|
||||
// console.log("✅", snippet);
|
||||
// } catch (exception) {
|
||||
// console.error(`❌ ${snippet}`);
|
||||
// console.error(exception);
|
||||
|
||||
fail++;
|
||||
}
|
||||
}
|
||||
// fail++;
|
||||
// }
|
||||
// }
|
||||
|
||||
if (fail) throw new Error(`❌ browser test failed (${fail})`);
|
||||
|
||||
|
||||
Binary file not shown.
@@ -25,5 +25,7 @@
|
||||
"/optional-chain-with-function.js",
|
||||
"/template-literal.js",
|
||||
"/number-literal-bug.js",
|
||||
"/caught-require.js"
|
||||
"/caught-require.js",
|
||||
"/package-json-utf8.js",
|
||||
"/multiple-var.js"
|
||||
]
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(3474597122, "array-args-with-default-values.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3474597122, "array-args-with-default-values.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var lines;
|
||||
const data = () => lines.map(([a = null, b = null, c = null, d = null]) => ({
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(3474597122, "array-args-with-default-values.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3474597122, "array-args-with-default-values.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var lines;
|
||||
const data = () => lines.map(([a = null, b = null, c = null, d = null]) => ({
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $bbcd215f from "http://localhost:8080/node_modules/react/index.js";
|
||||
var hello = null ?? "world";
|
||||
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $bbcd215f from "http://localhost:8080/node_modules/react/index.js";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(3012834585, "bundled-entry-point.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3012834585, "bundled-entry-point.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var hello = null ?? "world";
|
||||
function test() {
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $bbcd215f from "http://localhost:8080/node_modules/react/index.js";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(3012834585, "bundled-entry-point.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3012834585, "bundled-entry-point.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var hello = null ?? "world";
|
||||
function test() {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $bbcd215f from "http://localhost:8080/node_modules/react/index.js";
|
||||
var hello = null ?? "world";
|
||||
|
||||
|
||||
30
integration/snapshots/caught-require.debug.js
Normal file
@@ -0,0 +1,30 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
try {
|
||||
require((() => { throw (new Error(`Cannot require module '"this-package-should-not-exist"'`)); } )());
|
||||
|
||||
} catch (exception) {
|
||||
}
|
||||
|
||||
try {
|
||||
await import("this-package-should-not-exist");
|
||||
} catch (exception) {
|
||||
}
|
||||
import("this-package-should-not-exist").then(() => {
|
||||
}, () => {
|
||||
});
|
||||
export async function test() {
|
||||
try {
|
||||
require((() => { throw (new Error(`Cannot require module '"this-package-should-not-exist"'`)); } )());
|
||||
} catch (exception) {
|
||||
}
|
||||
try {
|
||||
await import("this-package-should-not-exist");
|
||||
} catch (exception) {
|
||||
}
|
||||
import("this-package-should-not-exist").then(() => {
|
||||
}, () => {
|
||||
});
|
||||
return testDone(import.meta.url);
|
||||
}
|
||||
53
integration/snapshots/caught-require.hmr.debug.js
Normal file
@@ -0,0 +1,53 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new FastHMR(2398506918, "caught-require.js", FastRefresh), exports = hmr.exports;
|
||||
await (hmr._load = async function() {
|
||||
try {
|
||||
require((() => { throw (new Error(`Cannot require module '"this-package-should-not-exist"'`)); } )());
|
||||
} catch (exception) {
|
||||
}
|
||||
try {
|
||||
await import("this-package-should-not-exist");
|
||||
} catch (exception) {
|
||||
}
|
||||
import("this-package-should-not-exist").then(() => {
|
||||
}, () => {
|
||||
});
|
||||
async function test() {
|
||||
try {
|
||||
require((() => { throw (new Error(`Cannot require module '"this-package-should-not-exist"'`)); } )());
|
||||
} catch (exception) {
|
||||
}
|
||||
try {
|
||||
await import("this-package-should-not-exist");
|
||||
} catch (exception) {
|
||||
}
|
||||
import("this-package-should-not-exist").then(() => {
|
||||
}, () => {
|
||||
});
|
||||
return testDone(import.meta.url);
|
||||
}
|
||||
hmr.exportAll({
|
||||
test: () => test
|
||||
});
|
||||
})();
|
||||
var $$hmr_test = hmr.exports.test;
|
||||
hmr._update = function(exports) {
|
||||
$$hmr_test = exports.test;
|
||||
};
|
||||
|
||||
export {
|
||||
$$hmr_test as test
|
||||
};
|
||||
@@ -1,15 +1,18 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(2398506918, "caught-require.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(2398506918, "caught-require.js", FastRefresh), exports = hmr.exports;
|
||||
await (hmr._load = async function() {
|
||||
try {
|
||||
require((() => { throw (new Error(`Cannot require module '"this-package-should-not-exist"'`)); } )());
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
try {
|
||||
require((() => { throw (new Error(`Cannot require module '"this-package-should-not-exist"'`)); } )());
|
||||
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import _login from "http://localhost:8080/_login.js";
|
||||
import _auth from "http://localhost:8080/_auth.js";
|
||||
import * as _loginReally from "http://localhost:8080/_login.js";
|
||||
@@ -11,7 +14,7 @@ import * as _loginReally2 from "http://localhost:8080/_login.js";
|
||||
import * as _authReally from "http://localhost:8080/_auth.js";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(3878252498, "cjs-transform-shouldnt-have-static-imports-in-cjs-function.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3878252498, "cjs-transform-shouldnt-have-static-imports-in-cjs-function.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
function test() {
|
||||
return testDone(import.meta.url);
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import _login from "http://localhost:8080/_login.js";
|
||||
import _auth from "http://localhost:8080/_auth.js";
|
||||
import * as _loginReally from "http://localhost:8080/_login.js";
|
||||
@@ -11,7 +14,7 @@ import * as _loginReally2 from "http://localhost:8080/_login.js";
|
||||
import * as _authReally from "http://localhost:8080/_auth.js";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(3878252498, "cjs-transform-shouldnt-have-static-imports-in-cjs-function.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(3878252498, "cjs-transform-shouldnt-have-static-imports-in-cjs-function.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
function test() {
|
||||
return testDone(import.meta.url);
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(726376257, "code-simplification-neql-define.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(726376257, "code-simplification-neql-define.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var testFailed = false;
|
||||
const invariant = () => {
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(726376257, "code-simplification-neql-define.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(726376257, "code-simplification-neql-define.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var testFailed = false;
|
||||
const invariant = () => {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import * as JSX from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $72625799 from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
var JSX = require($72625799);
|
||||
var jsx = require(JSX).jsxDEV;
|
||||
|
||||
import * as $5b3cea55 from "http://localhost:8080/node_modules/react-dom/index.js";
|
||||
|
||||
@@ -1,20 +1,24 @@
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import * as JSX from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $72625799 from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
var JSX = require($72625799);
|
||||
var jsx = require(JSX).jsxDEV;
|
||||
|
||||
import * as $5b3cea55 from "http://localhost:8080/node_modules/react-dom/index.js";
|
||||
var ReactDOM = require($5b3cea55);
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(2497996991, "custom-emotion-jsx/file.jsx"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(2497996991, "custom-emotion-jsx/file.jsx", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var Foo = () => jsx("div", {
|
||||
css: {content: '"it worked!"' }
|
||||
|
||||
@@ -1,20 +1,24 @@
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import * as JSX from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $72625799 from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
var JSX = require($72625799);
|
||||
var jsx = require(JSX).jsxDEV;
|
||||
|
||||
import * as $5b3cea55 from "http://localhost:8080/node_modules/react-dom/index.js";
|
||||
var ReactDOM = require($5b3cea55);
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(2497996991, "custom-emotion-jsx/file.jsx"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(2497996991, "custom-emotion-jsx/file.jsx", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var Foo = () => jsx("div", {
|
||||
css: {content: '"it worked!"' }
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import {
|
||||
__require as require
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import * as JSX from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import * as $72625799 from "http://localhost:8080/node_modules/@emotion/react/jsx-dev-runtime/dist/emotion-react-jsx-dev-runtime.browser.esm.js";
|
||||
var JSX = require($72625799);
|
||||
var jsx = require(JSX).jsxDEV;
|
||||
|
||||
import * as $5b3cea55 from "http://localhost:8080/node_modules/react-dom/index.js";
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import what from "http://localhost:8080/_auth.js";
|
||||
import * as where from "http://localhost:8080/_auth.js";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(1879780259, "export.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(1879780259, "export.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var yoyoyo = "yoyoyo";
|
||||
function hey() {
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import what from "http://localhost:8080/_auth.js";
|
||||
import * as where from "http://localhost:8080/_auth.js";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(1879780259, "export.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(1879780259, "export.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var yoyoyo = "yoyoyo";
|
||||
function hey() {
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(346837007, "forbid-in-is-correct.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(346837007, "forbid-in-is-correct.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var foo = () => {
|
||||
var D = (i, r) => () => (r || i((r = {exports: {} }).exports, r), r.exports);
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(false);
|
||||
|
||||
var hmr = new HMR(346837007, "forbid-in-is-correct.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(346837007, "forbid-in-is-correct.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
var foo = () => {
|
||||
var D = (i, r) => () => (r || i((r = {exports: {} }).exports, r), r.exports);
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import {
|
||||
__HMRModule as HMR
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
import {
|
||||
__HMRClient as Bun
|
||||
} from "http://localhost:8080/__runtime.js";
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshModule as FastHMR
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
import {
|
||||
__FastRefreshRuntime as FastRefresh
|
||||
} from "http://localhost:8080/bun:wrap";
|
||||
Bun.activate(true);
|
||||
|
||||
var hmr = new HMR(713665787, "global-is-remapped-to-globalThis.js"), exports = hmr.exports;
|
||||
var hmr = new FastHMR(713665787, "global-is-remapped-to-globalThis.js", FastRefresh), exports = hmr.exports;
|
||||
(hmr._load = function() {
|
||||
function test() {
|
||||
console.assert(globalThis === globalThis);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.