Compare commits


14 Commits

Author SHA1 Message Date
Colin McDonnell 7c9352392d Update readme and regen 2023-06-09 19:21:49 -07:00
Jarred Sumner fef9853d5a wip 2023-06-05 02:02:13 -07:00
Jarred Sumner 7dae496847 wip 2023-06-04 19:59:22 -07:00
Jarred Sumner 92b060c6e2 fix some CLI things 2023-06-04 19:06:02 -07:00
Jarred Sumner 2a64e8b3bb fixup 2023-06-04 19:05:42 -07:00
Jarred Sumner fde3b7fbb6 Fix build 2023-06-04 18:54:26 -07:00
Ashcon Partovi c16e769383 Add types and sample heapsnapshot for JSC and V8 2023-06-04 18:54:26 -07:00
Ashcon Partovi 5badc728d0 Add a DevTools client 2023-06-04 18:54:26 -07:00
Jarred Sumner c4b3b321c2 slightly more progress 2023-06-04 18:54:26 -07:00
Jarred Sumner cf599e77d9 Fix C++ compile errors 2023-06-04 18:54:26 -07:00
Jarred Sumner b727689a9b pushing this but it doesn't fix anything 2023-06-04 18:54:26 -07:00
Ashcon Partovi 4a36470588 Add script to generate DevTools protocol types for JSC and V8 2023-06-04 18:54:26 -07:00
Jarred Sumner ca08cf6b0a Update BunInspector.cpp 2023-06-04 18:54:26 -07:00
Jarred Sumner 10bd0fac3a WIP support inspector 2023-06-04 18:54:26 -07:00
31168 changed files with 196483 additions and 548119 deletions

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CFLAGS=${CFLAGS:-}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/base64
echo "1: $(which make)"
echo "2: $(which cmake)"
make clean
cmake $CMAKE_FLAGS .
make
cp libbase64.a $BUN_DEPS_OUT_DIR/libbase64.a
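
Each of these dependency build scripts is driven entirely by environment-variable defaults, so the output location and extra CMake flags can be overridden without editing the script. A minimal invocation sketch; the path scripts/build-base64.sh is an assumption, since this compare view omits file names:

# Hypothetical invocation of the base64 build script shown above.
# BUN_DEPS_OUT_DIR and CMAKE_FLAGS fall back to the in-script defaults when unset.
BUN_DEPS_OUT_DIR="$HOME/bun-deps" \
CMAKE_FLAGS="-DCMAKE_BUILD_TYPE=Release" \
  bash scripts/build-base64.sh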

View File

@@ -1,22 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
# Create the output directory if it doesn't exist
if (-not (Test-Path $BUN_DEPS_OUT_DIR)) {
New-Item -ItemType Directory -Path $BUN_DEPS_OUT_DIR
}
Set-Location (Join-Path $BUN_DEPS_DIR 'base64')
cmake $CMAKE_FLAGS .
cmake --build . --clean-first --config Release
Copy-Item **/*.lib $BUN_DEPS_OUT_DIR
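
The PowerShell variant mirrors the bash script above: $ErrorActionPreference = 'Stop' stands in for set -e, and the if ($env:X) { ... } else { ... } blocks stand in for bash's default-value expansion. A minimal sketch of that correspondence in bash, using the same variable names:

# bash default-value expansion that the PowerShell if/else blocks emulate
BUN_BASE_DIR=${BUN_BASE_DIR:-"$(pwd)"}                          # use the env value if set, else fall back
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-"$BUN_BASE_DIR/src/deps"}  # nested default built from the previous one
echo "deps output: $BUN_DEPS_OUT_DIR"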

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CFLAGS=${CFLAGS:-}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/boringssl
rm -rf build
mkdir -p build
cd build
CFLAGS="$CFLAGS" cmake $CMAKE_FLAGS -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" -GNinja ..
ninja libcrypto.a libssl.a libdecrepit.a
cp **/libcrypto.a $BUN_DEPS_OUT_DIR/libcrypto.a
cp **/libssl.a $BUN_DEPS_OUT_DIR/libssl.a
cp **/libdecrepit.a $BUN_DEPS_OUT_DIR/libdecrepit.a
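
One caveat in the copy commands above: bash only treats ** as a recursive glob when globstar is enabled, so as written the pattern matches a single directory level, which happens to be where ninja places these archives. A sketch that avoids globbing entirely, assuming the usual BoringSSL build layout of crypto/, ssl/ and decrepit/ subdirectories:

# Explicit paths (assumption: archives land in crypto/, ssl/, decrepit/ under the build dir)
cp crypto/libcrypto.a     "$BUN_DEPS_OUT_DIR/libcrypto.a"
cp ssl/libssl.a           "$BUN_DEPS_OUT_DIR/libssl.a"
cp decrepit/libdecrepit.a "$BUN_DEPS_OUT_DIR/libdecrepit.a"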

View File

@@ -1,19 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
mkdir -p $BUN_DEPS_OUT_DIR -Force
Set-Location $BUN_DEPS_DIR/boringssl
cmake $CMAKE_FLAGS .
cmake --build . --target crypto --target ssl --target decrepit --clean-first --config Release
Copy-Item crypto/Release/crypto.lib $BUN_DEPS_OUT_DIR
Copy-Item ssl/Release/ssl.lib $BUN_DEPS_OUT_DIR
Copy-Item decrepit/Release/decrepit.lib $BUN_DEPS_OUT_DIR

View File

@@ -1,24 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
# Create the output directory if it doesn't exist
if (-not (Test-Path $BUN_DEPS_OUT_DIR)) {
New-Item -ItemType Directory -Path $BUN_DEPS_OUT_DIR
}
Set-Location (Join-Path $BUN_DEPS_DIR 'c-ares')
rm -r build -ErrorAction SilentlyContinue
mkdir build -ErrorAction SilentlyContinue
cd build
cmake $CMAKE_FLAGS -DCMAKE_BUILD_TYPE=Release -G "Visual Studio 17 2022" -DCARES_STATIC=ON -DCARES_SHARED=OFF ..
cmake --build . --clean-first --config Release
cp ./lib/Release/*.lib $BUN_DEPS_OUT_DIR

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
CFLAGS=${CFLAGS:-}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/libarchive
make clean || echo ""
./build/clean.sh || echo ""
./build/autogen.sh
CFLAGS="$CFLAGS" $CCACHE_CC_FLAG ./configure --disable-shared --enable-static --with-pic --disable-bsdtar --disable-bsdcat --disable-rpath --enable-posix-regex-lib --without-xml2 --without-expat --without-openssl --without-iconv --without-zlib
make -j$CPUS
cp ./.libs/libarchive.a $BUN_DEPS_OUT_DIR/libarchive.a
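
The libarchive configure line disables every optional backend (xml2, expat, openssl, iconv, zlib) so the static archive stays self-contained. A quick optional sanity check before the copy, assuming an ar tool is on PATH:

# Optional check: list the object members of the freshly built static archive
ar t ./.libs/libarchive.a | head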

View File

@@ -1,16 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
Set-Location $BUN_DEPS_DIR/libarchive
cmake -DBUILD_SHARED_LIBS=OFF -DENABLE_TEST=OFF -DENABLE_INSTALL=OFF --compile-no-warning-as-error $CMAKE_FLAGS .
cmake --build . --target ALL_BUILD --clean-first --config Release -- /p:WarningLevel=0
Copy-Item libarchive/Release/archive.lib $BUN_DEPS_OUT_DIR

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CFLAGS=${CFLAGS:-}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/lol-html/c-api
cargo build --release
cp target/release/liblolhtml.a $BUN_DEPS_OUT_DIR

View File

@@ -1,16 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
Set-Location $BUN_DEPS_DIR/lol-html/c-api
cargo build --release --target x86_64-pc-windows-msvc
Copy-Item target/x86_64-pc-windows-msvc/release/lolhtml.lib $BUN_DEPS_OUT_DIR
Copy-Item target/x86_64-pc-windows-msvc/release/lolhtml.pdb $BUN_DEPS_OUT_DIR
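
The Windows lol-html build pins the x86_64-pc-windows-msvc target explicitly, which assumes that Rust target is already installed on the runner. A minimal sketch of preparing and running the same build by hand, from the lol-html/c-api directory, assuming rustup manages the toolchain:

# Assumption: rustup is available on the runner
rustup target add x86_64-pc-windows-msvc      # no-op if the target is already installed
cargo build --release --target x86_64-pc-windows-msvc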

View File

@@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
CFLAGS=${CFLAGS:-}
MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}
MIMALLOC_VALGRIND_ENABLED_FLAG=${MIMALLOC_VALGRIND_ENABLED_FLAG:-}
mkdir -p $BUN_DEPS_OUT_DIR
rm -rf $BUN_DEPS_DIR/mimalloc/CMakeCache* $BUN_DEPS_DIR/mimalloc/CMakeFiles
cd $BUN_DEPS_DIR/mimalloc
make clean || echo ""
CFLAGS="$CFLAGS" cmake $CMAKE_FLAGS $MIMALLOC_OVERRIDE_FLAG $MIMALLOC_VALGRIND_ENABLED_FLAG \
-DCMAKE_BUILD_TYPE=Debug \
-DMI_DEBUG_FULL=1 \
-DMI_SKIP_COLLECT_ON_EXIT=1 \
-DMI_BUILD_SHARED=OFF \
-DMI_BUILD_STATIC=ON \
-DMI_BUILD_TESTS=OFF \
-DMI_OSX_ZONE=OFF \
-DMI_OSX_INTERPOSE=OFF \
-DMI_BUILD_OBJECT=ON \
-DMI_USE_CXX=ON \
-DMI_OVERRIDE=OFF \
-DCMAKE_C_FLAGS="$CFLAGS" \
-DCMAKE_CXX_FLAGS="$CFLAGS" \
-GNinja .
ninja
cp $BUN_DEPS_DIR/mimalloc/libmimalloc-debug.a $BUN_DEPS_OUT_DIR/libmimalloc.a

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
CFLAGS=${CFLAGS:-}
MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}
MIMALLOC_VALGRIND_ENABLED_FLAG=${MIMALLOC_VALGRIND_ENABLED_FLAG:-}
mkdir -p $BUN_DEPS_OUT_DIR
rm -rf $BUN_DEPS_DIR/mimalloc/CMakeCache* $BUN_DEPS_DIR/mimalloc/CMakeFiles
cd $BUN_DEPS_DIR/mimalloc
make clean || echo ""
CFLAGS="$CFLAGS" cmake $CMAKE_FLAGS $MIMALLOC_OVERRIDE_FLAG \
-DMI_SKIP_COLLECT_ON_EXIT=1 \
-DMI_BUILD_SHARED=OFF \
-DMI_BUILD_STATIC=ON \
-DMI_BUILD_TESTS=OFF \
-DMI_OSX_ZONE=OFF \
-DMI_OSX_INTERPOSE=OFF \
-DMI_BUILD_OBJECT=ON \
-DMI_USE_CXX=ON \
-DMI_OVERRIDE=OFF \
-DCMAKE_C_FLAGS="$CFLAGS" \
-GNinja .
ninja
cp $BUN_DEPS_DIR/mimalloc/libmimalloc.a $BUN_DEPS_OUT_DIR/libmimalloc.a
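
This release variant of the mimalloc script differs from the debug one above mainly in what it drops: no -DCMAKE_BUILD_TYPE=Debug, no -DMI_DEBUG_FULL=1, and the resulting libmimalloc.a is copied under its own name rather than being renamed from libmimalloc-debug.a. A small wrapper sketch for picking between the two; the script paths are assumptions, since this view omits file names:

# Hypothetical wrapper: choose the mimalloc variant via an environment flag
if [ "${MIMALLOC_DEBUG:-0}" = "1" ]; then
  bash scripts/build-mimalloc-debug.sh
else
  bash scripts/build-mimalloc.sh
fi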

View File

@@ -1,22 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
# Create the output directory if it doesn't exist
if (-not (Test-Path $BUN_DEPS_OUT_DIR)) {
New-Item -ItemType Directory -Path $BUN_DEPS_OUT_DIR
}
Set-Location (Join-Path $BUN_DEPS_DIR 'mimalloc')
cmake $CMAKE_FLAGS -DMI_SKIP_COLLECT_ON_EXIT=1 -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_OSX_ZONE=OFF -DMI_OSX_INTERPOSE=OFF -DMI_BUILD_OBJECT=ON -DMI_USE_CXX=ON -DMI_OVERRIDE=OFF -DCMAKE_C_FLAGS="$CFLAGS" .
cmake --build . --clean-first --config Release
Copy-Item **/*.lib $BUN_DEPS_OUT_DIR

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
AR=${AR:-ar}
CC=${CC:-cc}
CFLAGS=${CFLAGS:-}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/tinycc
make clean
AR=$AR CC=$CC CFLAGS="$CFLAGS" ./configure --enable-static --cc=$CC --ar=$AR --config-predefs=yes
make -j10
cp *.a $BUN_DEPS_OUT_DIR
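
The tinycc script is the only one here that threads the compiler and archiver through explicit AR/CC variables, so it can be pointed at an alternative toolchain without editing the script. A minimal sketch, assuming clang and llvm-ar are installed; the script path is illustrative:

# Assumption: clang and llvm-ar are on PATH; the script path is hypothetical
CC=clang AR=llvm-ar CFLAGS="-O2" bash scripts/build-tinycc.sh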

View File

@@ -1,18 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
CFLAGS=${CFLAGS:-}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/zlib
make clean
$CCACHE_CC_FLAG CFLAGS="$CFLAGS" ./configure --static
make -j${CPUS}
cp ./libz.a $BUN_DEPS_OUT_DIR/libz.a

View File

@@ -1,19 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
CMAKE_FLAGS=${CMAKE_FLAGS:-}
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd ../ && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
CCACHE_CC_FLAG=${CCACHE_CC_FLAG:-}
CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/zstd
rm -rf Release CMakeCache.txt CMakeFiles
cmake $CMAKE_FLAGS -DZSTD_BUILD_STATIC=ON -B Release -S build/cmake -G Ninja
ninja -C Release
cp Release/lib/libzstd.a $BUN_DEPS_OUT_DIR/libzstd.a
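
The zstd script configures out of source (-B Release -S build/cmake) and then drives ninja directly. A generator-agnostic sketch of the same build step, letting CMake invoke whichever generator was configured:

# Equivalent to `ninja -C Release`, but independent of the chosen generator
cmake --build Release -j "${CPUS}"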

View File

@@ -1,21 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$SCRIPT_DIR = Split-Path $PSScriptRoot -Parent
$CMAKE_FLAGS = $env:CMAKE_FLAGS
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { $SCRIPT_DIR }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$CCACHE_CC_FLAG = $env:CCACHE_CC_FLAG
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-WmiObject -Class Win32_ComputerSystem).NumberOfLogicalProcessors }
$CFLAGS = $env:CFLAGS
$CXXFLAGS = $env:CXXFLAGS
# Create the output directory if it doesn't exist
if (-not (Test-Path $BUN_DEPS_OUT_DIR)) {
New-Item -ItemType Directory -Path $BUN_DEPS_OUT_DIR
}
Set-Location (Join-Path $BUN_DEPS_DIR 'zstd\build\cmake')
cmake $CMAKE_FLAGS -DZSTD_BUILD_STATIC=ON -DCMAKE_BUILD_TYPE=Release
cmake --build . --clean-first --config Release
Copy-Item lib\*\**.lib $BUN_DEPS_OUT_DIR

View File

@@ -1,8 +0,0 @@
# https://EditorConfig.org
root = true
[*]
charset = utf-8
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

22
.gitattributes vendored
View File

@@ -1,22 +1,3 @@
*.css text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.js text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.jsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.tsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.ts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.c text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.json text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
.vscode/launch.json linguist-generated
src/api/schema.d.ts linguist-generated
fixture.*.c linguist-generated
@@ -24,6 +5,7 @@ src/api/schema.js linguist-generated
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.lockb binary diff=lockb
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
@@ -49,5 +31,3 @@ src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated

View File

@@ -0,0 +1,35 @@
name: 📥 Install Problem
description: Report an issue during install or upgrade
labels: [bug, install]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting an issue, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Please try to include as much information as possible.
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: How did you attempt to install or upgrade?
description: Please provide the commands you ran to install or upgrade.
validations:
required: true
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -10,15 +10,11 @@ body:
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
description: Copy the output of `bun -v`
- type: input
attributes:
label: What platform is your computer?

View File

@@ -8,7 +8,7 @@ body:
Thank you for submitting an idea. It helps make Bun better.
If you want to discuss Bun, or learn how others are using Bun, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback`](https://discord.gg/unwUnHBNqy) channel.
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback-ideas`](https://discord.gg/unwUnHBNqy) channel.
- type: textarea
attributes:
label: What is the problem this feature would solve?

View File

@@ -1,62 +0,0 @@
### What does this PR do?
<!-- **Please explain what your changes do**, example: -->
<!--
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
-->
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes
### How did you verify your code works?
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
<!-- I wrote automated tests -->
<!-- If JavaScript/TypeScript modules or builtins changed:
- [ ] I ran `make js` and committed the transpiled changes
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
- [ ] I included a test for the new code, or an existing test covers it
-->
<!-- If Zig files changed:
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I or my editor ran `zig fmt` on the changed files
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
-->
<!-- If new methods, getters, or setters were added to a publicly exposed class:
- [ ] I added TypeScript types for the new methods, getters, or setters
-->
<!-- If dependencies in tests changed:
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->
<!-- If functions were added to exports.zig or bindings.zig
- [ ] I ran `make headers` to regenerate the C header file
-->
<!-- If \*.classes.ts files were added or changed:
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
-->
<!-- If a new builtin ESM/CJS module was added:
- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->

View File

@@ -1,89 +0,0 @@
name: bun-build
env:
ZIG_VERSION: 0.12.0-dev.888+130227491
concurrency:
group: bun-build-${{ github.ref || github.run_id }}
cancel-in-progress: true
on: [push]
# push:
# branches:
# - main
# - ci/*
# paths:
# # Build files
# # Source files
# - "src/**/*"
# - "packages/{bun-usockets,bun-uws}/src/**/*"
# pull_request:
# branches:
# - main
# paths:
# # Build files
# - "a"
# # Source files
# - "src/**/*"
# - "packages/{bun-usockets,bun-uws}/src/**/*"
# workflow_dispatch: {}
permissions:
contents: read
jobs:
build:
name: Build (${{ matrix.id }})
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
include:
- id: linux-x64
runner: ubuntu-latest
runs-on: ${{ matrix.runner }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: recursive
- name: Restore Cache
uses: actions/cache@v3
with:
key: ${{ runner.os }}-build-${{ matrix.id }}
path: |
build
- name: Setup APT
uses: awalsh128/cache-apt-pkgs-action@latest
with:
version: "1" # increment when packages change
packages: |
make
cmake
ccache
ninja-build
- name: Setup Zig
uses: goto-bus-stop/setup-zig@v2
with:
version: ${{ env.ZIG_VERSION }}
# - name: Setup LLVM
# uses: KyleMayes/install-llvm-action@v1
# with:
# version: "16"
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: "20"
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Setup Dependencies
run: |
bun install
bash .scripts/postinstall.sh
bun install -g esbuild
- name: Setup upterm session
uses: lhotari/action-upterm@v1
with:
limit-access-to-users: Electroid,paperdave,Jarred-Sumner

View File

@@ -0,0 +1,50 @@
name: bun-ecosystem-test
on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem

View File

@@ -0,0 +1,41 @@
name: bun-framework-next
on:
push:
paths:
- packages/bun-framework-next/**/*
branches: [main, bun-framework-next-actions]
pull_request:
paths:
- packages/bun-framework-next/**/*
branches: [main]
jobs:
build:
name: lint, test and build on Node ${{ matrix.node }} and ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
node: ["14.x"]
os: [macOS-latest]
steps:
- name: Checkout repo
uses: actions/checkout@v2
- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- name: Install PNPM
uses: pnpm/action-setup@v2.0.1
with:
version: 6.21.0
- name: Install dependencies
run: cd packages/bun-framework-next && pnpm install
- name: Type check bun-framework-next
run: cd packages/bun-framework-next && pnpm check

View File

@@ -16,7 +16,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -29,7 +28,6 @@ jobs:
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -38,7 +36,7 @@ jobs:
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64

View File

@@ -16,7 +16,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -26,7 +25,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -39,7 +37,6 @@ jobs:
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
permissions: write-all
strategy:
fail-fast: false
matrix:
@@ -49,7 +46,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
@@ -57,7 +54,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
@@ -155,33 +152,13 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
linux-test:
name: Tests ${{matrix.tag}}
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
permissions:
pull-requests: write
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -203,48 +180,23 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install-dependencies
name: Install dependencies
run: |
sudo apt-get update && sudo apt-get install -y openssl
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

View File

@@ -15,7 +15,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -24,7 +23,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -117,36 +115,36 @@ jobs:
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -154,14 +152,14 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -174,11 +172,11 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -249,7 +247,6 @@ jobs:
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -258,29 +255,29 @@ jobs:
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -293,10 +290,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -395,33 +392,12 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -443,47 +419,23 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

View File

@@ -15,7 +15,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -24,7 +23,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -117,36 +115,36 @@ jobs:
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -154,14 +152,14 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -174,11 +172,11 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
@@ -250,7 +248,6 @@ jobs:
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -259,29 +256,29 @@ jobs:
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -294,10 +291,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -399,32 +396,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
@@ -434,7 +410,7 @@ jobs:
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-12
runner: macos-11
steps:
- id: checkout
name: Checkout
@@ -447,50 +423,23 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
Hey @${{ github.actor }},
${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

View File

@@ -15,7 +15,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -24,7 +23,6 @@ on:
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
@@ -117,36 +115,36 @@ jobs:
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -154,14 +152,14 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -174,10 +172,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -252,7 +250,6 @@ jobs:
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -261,29 +258,29 @@ jobs:
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
package: bun-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-2/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -296,10 +293,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -401,32 +398,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
@@ -436,7 +412,7 @@ jobs:
matrix:
include:
- tag: bun-darwin-x64
runner: macos-12
runner: macos-11
steps:
- id: checkout
name: Checkout
@@ -449,47 +425,23 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

179
.github/workflows/bun-release-canary.yml vendored Normal file
View File

@@ -0,0 +1,179 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun

View File

@@ -0,0 +1,54 @@
name: bun-release-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary

View File

@@ -1,71 +1,52 @@
name: bun-release
concurrency: release
env:
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
on:
release:
types:
- published
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
inputs:
is-latest:
description: Is this the latest release?
type: boolean
default: false
tag:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
description: The tag to publish
required: true
use-docker:
description: Should Docker images be released?
type: boolean
default: false
use-npm:
description: Should npm packages be published?
type: boolean
default: false
use-homebrew:
description: Should binaries be released to Homebrew?
type: boolean
default: false
use-s3:
description: Should binaries be uploaded to S3?
type: boolean
default: false
use-types:
description: Should types be released to npm?
type: boolean
default: false
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: write
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup GPG
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Bun
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Sign Release
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
@@ -73,23 +54,32 @@ jobs:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
permissions:
contents: read
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Bun
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Release
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -97,45 +87,41 @@ jobs:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
permissions:
contents: read
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Node.js
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- name: Setup Bun
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Setup Tag
if: ${{ env.BUN_VERSION == 'canary' }}
run: |
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- name: Build
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
- name: Release (canary)
if: ${{ env.BUN_VERSION == 'canary' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
- name: Release (latest)
if: ${{ env.BUN_LATEST == 'true' }}
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
@@ -144,28 +130,20 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- variant: debian
suffix: ''
- variant: debian
suffix: -debian
- variant: slim
suffix: -slim
dir: debian-slim
- variant: alpine
suffix: -alpine
- variant: distroless
suffix: -distroless
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Docker emulator
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
@@ -177,60 +155,67 @@ jobs:
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
latest=false
tags: |
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.BUN_VERSION }}
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
permissions:
contents: read
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: gpg
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Ruby
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- name: Update Tap
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
- name: Commit Tap
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
commit_message: Release ${{ env.BUN_VERSION }}
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
@@ -238,23 +223,32 @@ jobs:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
permissions:
contents: read
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Bun
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Release
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}

View File

@@ -12,8 +12,6 @@ jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
permissions:
pull-requests: write
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:

View File

@@ -1,7 +1,7 @@
name: zig-fmt
env:
ZIG_VERSION: 0.12.0-dev.899+027aabf49
ZIG_VERSION: 0.11.0-dev.2571+31738de28
on:
pull_request:
@@ -18,8 +18,6 @@ jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
permissions:
pull-requests: write
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
@@ -30,7 +28,7 @@ jobs:
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |

32
.gitignore vendored
View File

@@ -6,7 +6,6 @@ packages/*/*.wasm
profile.json
node_modules
.envrc
.swcrc
yarn.lock
dist
@@ -97,8 +96,6 @@ packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
*.bc
src/fallback.version
@@ -124,32 +121,3 @@ cold-jsc-start
cold-jsc-start.d
/test.ts
/test.js
src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h
make-dev-stats.csv
.uuid
tsconfig.tsbuildinfo
build-release
*.lib
*.pdb
CMakeFiles
build.ninja
.ninja_deps
.ninja_log
CMakeCache.txt
cmake_install.cmake
compile_commands.json
*.lib
x64
**/*.vcxproj*
**/*.sln*
**/*.dir
**/*.pdb

15
.gitmodules vendored
View File

@@ -48,6 +48,13 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/uws"]
path = src/deps/uws
url = https://github.com/Jarred-Sumner/uWebSockets
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = true
[submodule "src/deps/tinycc"]
path = src/deps/tinycc
url = https://github.com/Jarred-Sumner/tinycc.git
@@ -61,10 +68,4 @@ fetchRecurseSubmodules = false
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
[submodule "src/deps/base64"]
path = src/deps/base64
url = https://github.com/aklomp/base64.git
ignore = dirty
depth = 1
shallow = true
ignore = dirty

View File

@@ -6,7 +6,7 @@ module.exports = {
quoteProps: "preserve",
overrides: [
{
files: ["*.md"],
files: "README.md",
options: {
printWidth: 80,
},

View File

@@ -1,6 +1,13 @@
#!/bin/bash
set -euxo pipefail
# if bun-webkit node_modules directory exists
if [ -d ./node_modules/bun-webkit ]; then
rm -f bun-webkit
# get the first matching bun-webkit-* directory name
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
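# e.g. this ends up as ./bun-webkit -> ./node_modules/bun-webkit-macos-amd64
# (the exact bun-webkit-<platform>-<arch> package name varies per machine)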
fi
# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-16 || which clang++) .vscode/clang++ 2>/dev/null
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null

3
.scripts/write-versions.sh Executable file → Normal file
View File

@@ -7,9 +7,11 @@ LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
USOCKETS=$(cd src/deps/uws/uSockets && git rev-parse HEAD)
rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
@@ -18,6 +20,7 @@ echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_li
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig

View File

@@ -6,36 +6,31 @@
"includePath": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/"
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
@@ -44,9 +39,8 @@
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi"
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb"
@@ -59,70 +53,7 @@
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"cStandard": "c17",
"cppStandard": "c++20"
},
{
"name": "BunWithJSCDebug",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/"
],
"browse": {
"path": [
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb_debug"
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
"STATICALLY_LINKED_WITH_WTF=1",
"BUILDING_WITH_CMAKE=1",
"NOMINMAX",
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"ASSERT_ENABLED=0",
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],

68
.vscode/launch.json generated vendored
View File

@@ -4,6 +4,8 @@
// it makes our tests very slow
// But it helps catch memory bugs
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits:
// { "initCommands": ["process handle -p false -s false -n false SIGHUP"] }
"version": "0.2.0",
"configurations": [
{
@@ -12,13 +14,13 @@
"name": "bun test [file]",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -27,12 +29,12 @@
"name": "bun test [file] (fast)",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
@@ -42,11 +44,11 @@
"name": "bun test [file] (verbose)",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -55,12 +57,12 @@
"name": "bun test [file] --watch",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -69,12 +71,12 @@
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -82,13 +84,14 @@
"request": "launch",
"name": "bun test [*]",
"program": "bun-debug",
"args": ["test", "js/node"],
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -96,13 +99,13 @@
"request": "launch",
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test", "js"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -111,25 +114,40 @@
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file]",
"program": "./Build/debug/bun.exe",
"args": ["run", "${file}", "${file}"],
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"NODE_ENV": "development"
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [Inspect]",
"program": "bun-debug",
"args": ["--inspect-brk", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -144,6 +162,7 @@
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -156,6 +175,7 @@
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -168,6 +188,7 @@
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -180,6 +201,7 @@
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
@@ -201,7 +223,9 @@
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
}
},
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits.
"initCommands": ["process handle -p false -s false -n false SIGHUP"]
},
{
"type": "lldb",
@@ -225,6 +249,7 @@
"console": "internalConsole",
"env": {}
},
{
"type": "lldb",
"request": "launch",
@@ -307,10 +332,13 @@
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"cwd": "/Users/jarred/Build/worky",
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {}
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
}
},
{
"type": "lldb",
"request": "launch",

91
.vscode/settings.json vendored
View File

@@ -7,7 +7,7 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
"zig.formattingProvider": "zls",
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"[zig]": {
@@ -25,8 +25,7 @@
"editor.formatOnSave": true
},
"zig.zls.enableInlayHints": false,
"zig.zls.enabled": true,
"git.ignoreSubmodules": true,
"[jsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
@@ -66,40 +65,37 @@
"test/snapshots": true,
"test/snapshots-no-hmr": true,
"src/bun.js/WebKit": true,
// "src/deps/libarchive": true,
// "src/deps/mimalloc": true,
// "src/deps/s2n-tls": true,
// "src/deps/boringssl": true,
// "src/deps/openssl": true,
// "src/deps/uws": true,
// "src/deps/zlib": true,
// "src/deps/lol-html": true,
// "src/deps/c-ares": true,
// "src/deps/tinycc": true,
// "src/deps/zstd": true,
"src/deps/libarchive": true,
"src/deps/mimalloc": true,
"src/deps/s2n-tls": true,
"src/deps/boringssl": true,
"src/deps/openssl": true,
"src/deps/uws": true,
"src/deps/zlib": true,
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true,
"packages/bun-uws/fuzzing/seed-corpus/": true,
"**/*.dep": true
// "**/CMakeFiles": true
"src/js/out": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
"WebKit/JSTests": true,
"WebKit/Tools": true,
"WebKit/WebDriverTests": true,
"WebKit/WebKit.xcworkspace": true,
"WebKit/WebKitLibraries": true,
"WebKit/Websites": true,
"WebKit/resources": true,
"WebKit/LayoutTests": true,
"WebKit/ManualTests": true,
"WebKit/PerformanceTests": true,
"WebKit/WebKitLegacy": true,
"WebKit/WebCore": true,
"WebKit/WebDriver": true,
"WebKit/WebKitBuild": true,
"WebKit/WebInspectorUI": true
"src/bun.js/WebKit/JSTests": true,
"src/bun.js/WebKit/Tools": true,
"src/bun.js/WebKit/WebDriverTests": true,
"src/bun.js/WebKit/WebKit.xcworkspace": true,
"src/bun.js/WebKit/WebKitLibraries": true,
"src/bun.js/WebKit/Websites": true,
"src/bun.js/WebKit/resources": true,
"src/bun.js/WebKit/LayoutTests": true,
"src/bun.js/WebKit/ManualTests": true,
"src/bun.js/WebKit/PerformanceTests": true,
"src/bun.js/WebKit/WebKitLegacy": true,
"src/bun.js/WebKit/WebCore": true,
"src/bun.js/WebKit/WebDriver": true,
"src/bun.js/WebKit/WebKitBuild": true,
"src/bun.js/WebKit/WebInspectorUI": true
},
"[cpp]": {
"editor.defaultFormatter": "xaver.clang-format"
@@ -219,33 +215,10 @@
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp",
"types.h": "c",
"bsd.h": "c",
"xtr1common": "cpp",
"stop_token": "cpp",
"xfacet": "cpp",
"xhash": "cpp",
"xiosbase": "cpp",
"xlocale": "cpp",
"xlocbuf": "cpp",
"xlocinfo": "cpp",
"xlocmes": "cpp",
"xlocmon": "cpp",
"xlocnum": "cpp",
"xloctime": "cpp",
"xmemory": "cpp",
"xstring": "cpp",
"xtree": "cpp",
"xutility": "cpp",
"string.h": "c",
"zutil.h": "c",
"gzguts.h": "c",
"stdatomic.h": "c",
"root_certs.h": "c"
"codecvt": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"eslint.workingDirectories": ["packages/bun-types"],
"typescript.tsdk": "node_modules/typescript/lib",
"zig.initialSetupDone": true
"typescript.tsdk": "node_modules/typescript/lib"
}

View File

@@ -1,974 +0,0 @@
cmake_minimum_required(VERSION 3.22)
cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)
set(Bun_VERSION "1.0.7")
set(WEBKIT_TAG 1a49a1f94bf42ab4f8c6b11d7bbbb21e491d2d62)
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
# --- Build Type ---
# This is done at the start simply so this is the first message printed
if(NOT CMAKE_BUILD_TYPE)
message(WARNING "No CMAKE_BUILD_TYPE value specified, defaulting to Debug.\nSet a build type with -DCMAKE_BUILD_TYPE=<Debug|Release>")
set(CMAKE_BUILD_TYPE "Debug" CACHE STRING "Choose the type of build (Debug, Release)" FORCE)
else()
if(NOT CMAKE_BUILD_TYPE MATCHES "^(Debug|Release)$")
message(FATAL_ERROR
"Invalid CMAKE_BUILD_TYPE value specified: ${CMAKE_BUILD_TYPE}\n"
"CMAKE_BUILD_TYPE must be Debug or Release.")
endif()
message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}")
endif()
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(DEBUG ON)
set(ZIG_OPTIMIZE "Debug")
set(bun "bun-debug")
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(DEBUG OFF)
set(ZIG_OPTIMIZE "ReleaseFast")
set(bun "bun-profile")
endif()
# --- LLVM ---
# This detection is a little overkill, but it ensures that the set LLVM_VERSION matches under
# any case possible. Sorry for the complexity...
#
# Bun and WebKit must be compiled with the same compiler, so we do as much as we can to ensure that
# the compiler used for the prebuilt WebKit, LLVM 16, is the one that we detect in this process.
#
# It has to be done before project() is called, so that CMake doesn't pick a compiler for us, but even then
# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match.
#
# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid.
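# A hypothetical invocation (paths are only illustrative) that points the search below
# at a known LLVM 16 install instead of probing the platform defaults:
#   cmake -B build -DCMAKE_BUILD_TYPE=Release -DLLVM_PREFIX=/opt/homebrew/opt/llvm@16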
set(LLVM_VERSION 16)
macro(BUN_FIND_LLVM)
find_program(
_LLVM_CLANG_PATH
NAMES clang++-${LLVM_VERSION} clang-${LLVM_VERSION} clang++ clang
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s clang++ binary. Please pass -DLLVM_PREFIX with the path to LLVM"
)
if(NOT _LLVM_CLANG_PATH)
message(FATAL_ERROR "Could not find LLVM ${LLVM_VERSION}, search paths: ${PLATFORM_LLVM_SEARCH_PATHS}")
endif()
set(CMAKE_CXX_COMPILER ${_LLVM_CLANG_PATH})
set(CMAKE_C_COMPILER ${_LLVM_CLANG_PATH})
find_program(
STRIP
NAMES llvm-strip
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
)
find_program(
DSYMUTIL
NAMES dsymutil
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s dsymutil binary"
)
find_program(
AR
NAMES llvm-ar
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-ar binary"
)
find_program(
RANLIB
NAMES llvm-ranlib
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-ar binary"
)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE _tmp)
string(REGEX MATCH "version ([0-9]+)\\.([0-9]+)\\.([0-9]+)" CMAKE_CXX_COMPILER_VERSION "${_tmp}")
set(CMAKE_CXX_COMPILER_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
endmacro()
if(UNIX)
if(LLVM_PREFIX)
set(PLATFORM_LLVM_SEARCH_PATHS ${LLVM_PREFIX}/bin)
else()
set(PLATFORM_LLVM_SEARCH_PATHS /usr/lib/llvm-${LLVM_VERSION}/bin /lib/llvm-${LLVM_VERSION}/bin /usr/bin /usr/local/bin)
if(APPLE)
set(PLATFORM_LLVM_SEARCH_PATHS /opt/homebrew/opt/llvm@${LLVM_VERSION}/bin /opt/homebrew/bin ${PLATFORM_LLVM_SEARCH_PATHS})
endif()
endif()
if(CMAKE_CXX_COMPILER)
set(_LLVM_CLANG_PATH "${CMAKE_CXX_COMPILER}")
endif()
BUN_FIND_LLVM()
else()
# On Windows, MSVC is expected; until we get a better build configuration, compiler selection is a free-for-all
endif()
set(CMAKE_COLOR_DIAGNOSTICS ON)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_C_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_C_STANDARD_REQUIRED ON)
project(Bun VERSION "${Bun_VERSION}")
# More effort to prevent using the wrong C++ compiler
if(UNIX)
if((NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR (NOT CMAKE_CXX_COMPILER_VERSION MATCHES "^${LLVM_VERSION}\."))
# Attempt to auto-correct the compiler
message(STATUS "Compiler mismatch, attempting to auto-correct")
unset(_LLVM_CLANG_PATH)
BUN_FIND_LLVM()
if((NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR (NOT CMAKE_CXX_COMPILER_VERSION MATCHES "^${LLVM_VERSION}\."))
message(WARNING "Expected LLVM ${LLVM_VERSION} as the C++ compiler, build may fail or break at runtime.")
endif()
endif()
endif()
message(STATUS "C++ Compiler: ${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION} at ${CMAKE_CXX_COMPILER}")
# --- End LLVM ---
set(DEFAULT_ON_UNLESS_WINDOWS ON)
set(REQUIRED_IF_NOT_WINDOWS "REQUIRED")
if(WIN32)
set(DEFAULT_ON_UNLESS_WINDOWS OFF)
set(REQUIRED_IF_NOT_WINDOWS OFF)
endif()
set(DEFAULT_ON_UNLESS_APPLE ON)
if(APPLE)
set(DEFAULT_ON_UNLESS_APPLE OFF)
endif()
set(CI OFF)
if(DEFINED ENV{CI} OR DEFINED ENV{GITHUB_ACTIONS})
set(CI ON)
endif()
# -- Build Flags --
option(USE_STATIC_SQLITE "Statically link SQLite?" ${DEFAULT_ON_UNLESS_APPLE})
option(USE_CUSTOM_ZLIB "Use Bun's recommended version of zlib" ${DEFAULT_ON_UNLESS_WINDOWS})
option(USE_CUSTOM_BORINGSSL "Use Bun's recommended version of BoringSSL" ON)
option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON)
option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON)
option(USE_CUSTOM_ZSTD "Use Bun's recommended version of zstd" ON)
option(USE_CUSTOM_CARES "Use Bun's recommended version of c-ares" ${DEFAULT_ON_UNLESS_WINDOWS})
option(USE_CUSTOM_BASE64 "Use Bun's recommended version of libbase64" ON)
option(USE_CUSTOM_LOLHTML "Use Bun's recommended version of lolhtml" ON)
option(USE_CUSTOM_TINYCC "Use Bun's recommended version of tinycc" ON)
option(USE_CUSTOM_LIBUV "Use Bun's recommended version of libuv (Windows only)" OFF)
option(USE_BASELINE_BUILD "Build Bun for baseline (older) CPUs" OFF)
option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore" OFF)
option(USE_UNIFIED_SOURCES "Use unified sources to speed up the build" OFF)
option(CANARY "Make `bun --revision` report a canary release" OFF)
set(ERROR_LIMIT 100 CACHE STRING "Maximum number of errors to show when compiling C++ code")
set(ARCH x86_64)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
set(ARCH aarch64)
endif()
if(NOT CPU_TARGET)
set(CPU_TARGET "native" CACHE STRING "CPU target for the compiler" FORCE)
if (ARCH STREQUAL "x86_64")
if (NOT MSVC)
if (USE_BASELINE_BUILD)
set(CPU_TARGET "nehalem")
else()
set(CPU_TARGET "haswell")
endif()
endif()
endif()
endif()
message(STATUS "Building for CPU Target: ${CPU_TARGET}")
set(ZIG_TARGET "native")
if(WIN32)
set(ZIG_TARGET "${ARCH}-windows-msvc")
endif()
# set(CONFIGURE_DEPENDS "")
set(CONFIGURE_DEPENDS "CONFIGURE_DEPENDS")
# --- CLI Paths ---
# Zig Compiler
function(validate_zig validator_result_var item)
set(${validator_result_var} FALSE PARENT_SCOPE)
# We will allow any valid zig compiler, as long as it contains some text from `zig zen`
# Ideally we would do a version or feature check, but that would be quite slow
execute_process(COMMAND ${item} zen OUTPUT_VARIABLE ZIG_ZEN_OUTPUT)
if(ZIG_ZEN_OUTPUT MATCHES "Together we serve the users")
set(${validator_result_var} TRUE PARENT_SCOPE)
else()
set(${validator_result_var} FALSE PARENT_SCOPE)
endif()
endfunction()
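# (Illustratively, the same check by hand would be: zig zen | grep -q "Together we serve the users")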
find_program(ZIG_COMPILER zig REQUIRED DOC "Path to the Zig compiler" VALIDATOR validate_zig)
message(STATUS "Found Zig Compiler: ${ZIG_COMPILER}")
# Bun
if(NOT WIN32)
find_program(BUN_EXECUTABLE bun REQUIRED DOC "Path to an already built release of Bun")
message(STATUS "Found Bun: ${BUN_EXECUTABLE}")
else()
set(BUN_EXECUTABLE "echo")
endif()
# Prettier
find_program(PRETTIER prettier DOC "Path to prettier" PATHS ./node_modules/.bin ENV PATH)
# Esbuild (TODO: switch these to "bun build")
find_program(ESBUILD esbuild DOC "Path to esbuild" PATHS ./node_modules/.bin ENV PATH)
# Ruby (only needed for unified sources)
if(USE_UNIFIED_SOURCES)
# ruby 'WebKit/Source/WTF/Scripts/generate-unified-source-bundles.rb' source_list.txt --source-tree-path . --derived-sources-path build/unified-sources
find_program(RUBY ruby DOC "Path to ruby")
endif()
# CCache
find_program(CCACHE_PROGRAM ccache)
if(CCACHE_PROGRAM)
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PROGRAM}")
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PROGRAM}")
message(STATUS "Using ccache: ${CCACHE_PROGRAM}")
endif()
# --- WebKit ---
# WebKit is either prebuilt and distributed via NPM, or you can pass WEBKIT_DIR to use a local build.
# We cannot include their CMake build files (TODO: explain why, for now ask @paperdave why)
#
# On Unix, this will pull from NPM the single package that is needed and use that
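# A hypothetical local override (path is only an example); the directory must contain
# include/ and lib/, which is validated further down:
#   cmake -B build -DCMAKE_BUILD_TYPE=Release -DWEBKIT_DIR=/path/to/packaged/bun-webkit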
if(WIN32)
set(STATIC_LIB_EXT "lib")
set(libJavaScriptCore "JavaScriptCore")
set(libWTF "WTF")
else()
set(STATIC_LIB_EXT "a")
set(libJavaScriptCore "libJavaScriptCore")
set(libWTF "libWTF")
endif()
if(NOT WEBKIT_DIR)
if(WIN32)
message(FATAL_ERROR "Windows does not have prebuilt webkit yet. Please run release-windows.ps1 and pass the path to the built webkit with -DWEBKIT_DIR")
endif()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "")
set(ASSERT_ENABLED "0")
if(USE_DEBUG_JSC)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-debug")
set(ASSERT_ENABLED "1")
elseif(NOT DEBUG)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
set(ASSERT_ENABLED "0")
endif()
if (WIN32)
set(BUN_WEBKIT_PACKAGE_PLATFORM "win32")
elseif(APPLE)
set(BUN_WEBKIT_PACKAGE_PLATFORM "macos")
else()
set(BUN_WEBKIT_PACKAGE_PLATFORM "linux")
endif()
if(ARCH STREQUAL "x86_64")
set(BUN_WEBKIT_PACKAGE_ARCH "amd64")
elseif(ARCH MATCHES "aarch64|arm64|arm")
set(BUN_WEBKIT_PACKAGE_ARCH "arm64")
endif()
set(BUN_WEBKIT_PACKAGE_NAME "bun-webkit-${BUN_WEBKIT_PACKAGE_PLATFORM}-${BUN_WEBKIT_PACKAGE_ARCH}${BUN_WEBKIT_PACKAGE_NAME_SUFFIX}")
message(STATUS "Using Pre-built WebKit: ${BUN_WEBKIT_PACKAGE_NAME}")
execute_process(
COMMAND ${BUN_EXECUTABLE}
"${CMAKE_CURRENT_SOURCE_DIR}/src/codegen/download-webkit.ts"
"--outdir=${BUN_WORKDIR}/bun-webkit"
"--tag=${WEBKIT_TAG}"
"--package=${BUN_WEBKIT_PACKAGE_NAME}"
WORKING_DIRECTORY ${BUN_WORKDIR}
)
if(NOT EXISTS "${BUN_WORKDIR}/bun-webkit")
message(FATAL_ERROR "Prebuilt WebKit package ${BUN_WEBKIT_PACKAGE_NAME} failed to install")
endif()
set(WEBKIT_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include")
set(WEBKIT_LIB_DIR "${BUN_WORKDIR}/bun-webkit/lib")
else()
# Setting WEBKIT_DIR means you either have a path to the WebKit repo, or you have a path to packaged webkit
# Non-packaged webkit has CMakeLists.txt
if(EXISTS "${WEBKIT_DIR}/CMakeLists.txt")
# Since we may be doing a Debug build of Bun but with a Release build of JSC, we can't
# include their CMakeLists directly here, but rather we need to run `cmake` as a dependency
# of our build. It'll still have decent caching which is what really matters.
# cmake WEBKIT_DIR -B WEBKIT_DIR/WebKitBuild/WEBKIT_BUILD_TYPE
# -DPORT=JSCOnly
# -DENABLE_STATIC_JSC=ON
# -DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON
# -DCMAKE_BUILD_TYPE=Debug
# -DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON
# -DUSE_THIN_ARCHIVES=OFF
# -DENABLE_FTL_JIT=ON
# -DCMAKE_C_COMPILER=$(which clang-16)
# -DCMAKE_CXX_COMPILER=$(which clang++-16)
# -DUSE_BUN_JSC_ADDITIONS=1
# -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld"
# -DCMAKE_AR=$(which llvm-ar)
# -DCMAKE_RANLIB=$(which llvm-ranlib)
# -DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON
# -G Ninja
# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0
# -DPTHREAD_JIT_PERMISSIONS_API=1
# -DUSE_PTHREAD_JIT_PERMISSIONS_API=ON
# -DENABLE_REMOTE_INSPECTOR=ON
message(FATAL_ERROR "TODO: Setting WEBKIT_DIR to the WebKit repository to enable automatic builds. For now you need to run the release script, and point to the packaged directory.")
else()
if(NOT EXISTS "${WEBKIT_DIR}/lib/${libWTF}.${STATIC_LIB_EXT}" OR NOT EXISTS "${WEBKIT_DIR}/lib/${libJavaScriptCore}.${STATIC_LIB_EXT}")
if(WEBKIT_DIR MATCHES "src/bun.js/WebKit$")
message(FATAL_ERROR "WebKit directory ${WEBKIT_DIR} does not contain all the required files for Bun. Did you forget to init submodules?")
endif()
message(FATAL_ERROR "WebKit directory ${WEBKIT_DIR} does not contain all the required files for Bun. Expected a path to the oven-sh/WebKit repository, or a path to a folder containing `include` and `lib`.")
endif()
set(WEBKIT_INCLUDE_DIR "${WEBKIT_DIR}/include")
set(WEBKIT_LIB_DIR "${WEBKIT_DIR}/lib")
message(STATUS "Using specified WebKit directory: ${WEBKIT_DIR}")
set(ASSERT_ENABLED "0")
message(STATUS "WebKit assertions: OFF")
endif()
endif()
# --- CMake Macros ---
# Append the given dependencies to the source file
macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps)
set(_tmp)
get_source_file_property(_tmp ${_source} OBJECT_DEPENDS)
if(NOT _tmp)
set(_tmp "")
endif()
foreach(f ${_deps})
list(APPEND _tmp "${f}")
endforeach()
set_source_files_properties(${_source} PROPERTIES OBJECT_DEPENDS "${_tmp}")
unset(_tmp)
endmacro()
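# Usage sketch (mirrors the GENERATE_HASH_LUT calls below): mark a generated header as an
# OBJECT_DEPENDS of the .cpp that includes it, e.g.
#   WEBKIT_ADD_SOURCE_DEPENDENCIES(${BUN_SRC}/bun.js/bindings/BunObject.cpp ${BUN_WORKDIR}/codegen/BunObject.lut.h)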
# --- BUILD ---
set(BUN_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src")
set(BUN_DEPS_DIR "${BUN_SRC}/deps")
set(BUN_CODEGEN_SRC "${BUN_SRC}/codegen")
file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
"${BUN_SRC}/deps/*.cpp"
"${BUN_SRC}/io/*.cpp"
"${BUN_SRC}/bun.js/modules/*.cpp"
"${BUN_SRC}/bun.js/bindings/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcore/*.cpp"
"${BUN_SRC}/bun.js/bindings/sqlite/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcrypto/*.cpp"
"${BUN_SRC}/bun.js/bindings/webcrypto/*/*.cpp"
"${BUN_SRC}/deps/picohttpparser/picohttpparser.c"
)
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
"${USOCKETS_SRC}/*.c"
"${USOCKETS_SRC}/eventing/*.c"
"${USOCKETS_SRC}/internal/*.c"
"${USOCKETS_SRC}/crypto/*.c"
"${USOCKETS_SRC}/crypto/*.cpp"
)
# --- Classes Generator ---
file(GLOB BUN_CLASSES_TS ${CONFIGURE_DEPENDS}
"${BUN_SRC}/bun.js/*.classes.ts"
"${BUN_SRC}/bun.js/api/*.classes.ts"
"${BUN_SRC}/bun.js/test/*.classes.ts"
"${BUN_SRC}/bun.js/webcore/*.classes.ts"
"${BUN_SRC}/bun.js/node/*.classes.ts"
)
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses+lazyStructureHeader.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses+DOMClientIsoSubspaces.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses+DOMIsoSubspaces.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses+lazyStructureImpl.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig"
COMMAND ${BUN_EXECUTABLE} "${BUN_CODEGEN_SRC}/generate-classes.ts" ${BUN_CLASSES_TS} "${BUN_WORKDIR}/codegen"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
MAIN_DEPENDENCY "${BUN_CODEGEN_SRC}/generate-classes.ts"
DEPENDS ${BUN_CLASSES_TS}
VERBATIM
COMMENT "Generating *.classes.ts bindings"
)
# --- JSSink Generator ---
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/JSSink.cpp"
"${BUN_WORKDIR}/codegen/JSSink.h"
COMMAND ${BUN_EXECUTABLE} "src/codegen/generate-jssink.ts" "${BUN_WORKDIR}/codegen"
VERBATIM
MAIN_DEPENDENCY "src/codegen/generate-jssink.ts"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Generating JSSink"
)
# --- .lut.h Generator ---
set(BUN_OBJECT_LUT_SOURCES
bun.js/bindings/BunObject.cpp
bun.js/bindings/ZigGlobalObject.lut.txt
bun.js/bindings/JSBuffer.cpp
bun.js/bindings/BunProcess.cpp
bun.js/bindings/ProcessBindingConstants.cpp
bun.js/bindings/ProcessBindingNatives.cpp
)
set(BUN_HASH_LUT_GENERATOR "${BUN_CODEGEN_SRC}/create-hash-table.ts")
macro(GENERATE_HASH_LUT _input _output _display_name)
add_custom_command(
OUTPUT ${_output}
MAIN_DEPENDENCY ${BUN_HASH_LUT_GENERATOR}
DEPENDS ${_input}
COMMAND ${BUN_EXECUTABLE} ${BUN_HASH_LUT_GENERATOR} ${_input} ${_output}
VERBATIM
COMMENT "Generating ${_display_name}"
)
# list(APPEND JavaScriptCore_HEADERS ${_output})
WEBKIT_ADD_SOURCE_DEPENDENCIES(${_input} ${_output})
endmacro()
foreach(_file ${BUN_OBJECT_LUT_SOURCES})
if(NOT EXISTS "${BUN_SRC}/${_file}")
message(FATAL_ERROR "Could not find ${_file} needed for LUT generation")
endif()
get_filename_component(_name ${_file} NAME_WE)
# workaround for ZigGlobalObject
if(_name MATCHES "ZigGlobalObject")
set(_name "ZigGlobalObject")
endif()
GENERATE_HASH_LUT(${BUN_SRC}/${_file} ${BUN_WORKDIR}/codegen/${_name}.lut.h ${_name}.lut.h)
endforeach()
WEBKIT_ADD_SOURCE_DEPENDENCIES(${BUN_SRC}/bun.js/bindings/ZigGlobalObject.cpp ${BUN_WORKDIR}/codegen/ZigGlobalObject.lut.h)
# --- Identifier Cache ---
set(BUN_IDENTIFIER_CACHE_OUT
"${BUN_SRC}/js_lexer/id_continue_bitset.blob"
"${BUN_SRC}/js_lexer/id_continue_bitset.meta.blob"
"${BUN_SRC}/js_lexer/id_start_bitset.blob"
"${BUN_SRC}/js_lexer/id_start_bitset.meta.blob")
add_custom_command(
OUTPUT ${BUN_IDENTIFIER_CACHE_OUT}
MAIN_DEPENDENCY "${BUN_SRC}/js_lexer/identifier_data.zig"
DEPENDS "${BUN_SRC}/js_lexer/identifier_cache.zig"
COMMAND ${ZIG_COMPILER} run "${BUN_SRC}/js_lexer/identifier_data.zig"
VERBATIM
COMMENT "Building Identifier Cache"
)
# --- Bundled TS/JS ---
# Note: It's not worth doing this in parallel at the CMake/Ninja level, because this bundling
# requires all the JS files to be known, but also Bun will use all cores during bundling anyways.
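# In practice the single `bun src/codegen/bundle-modules.ts <workdir>` command below
# regenerates every listed header and ResolvedSourceTag.zig in one pass.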
file(GLOB BUN_TS_MODULES ${CONFIGURE_DEPENDS}
"${BUN_SRC}/js/node/*.ts"
"${BUN_SRC}/js/node/*.js"
"${BUN_SRC}/js/bun/*.js"
"${BUN_SRC}/js/bun/*.ts"
"${BUN_SRC}/js/thirdparty/*.js"
"${BUN_SRC}/js/thirdparty/*.ts"
"${BUN_SRC}/js/internal/*.js"
"${BUN_SRC}/js/internal/*.ts"
)
file(GLOB BUN_TS_FUNCTIONS ${CONFIGURE_DEPENDS} "${BUN_SRC}/js/builtins/*.ts")
add_custom_command(
OUTPUT
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+createInternalModuleById.h"
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+enum.h"
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+numberOfModules.h"
"${BUN_WORKDIR}/codegen/NativeModuleImpl.h"
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_WORKDIR}/codegen/SyntheticModuleType.h"
COMMAND ${BUN_EXECUTABLE} "${BUN_SRC}/codegen/bundle-modules.ts" "${BUN_WORKDIR}"
DEPENDS ${BUN_TS_MODULES}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Bundling JS modules"
)
WEBKIT_ADD_SOURCE_DEPENDENCIES(
"${BUN_SRC}/bun.js/bindings/InternalModuleRegistry.cpp"
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
)
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
COMMAND ${BUN_EXECUTABLE} "${BUN_SRC}/codegen/bundle-functions.ts" "${BUN_WORKDIR}"
DEPENDS ${BUN_TS_FUNCTIONS} "${BUN_SRC}/codegen/bundle-functions.ts"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Bundling JS builtin functions"
)
# --- Peechy API ---
add_custom_command(
OUTPUT "${BUN_SRC}/api/schema.js"
"${BUN_SRC}/api/schema.d.ts"
"${BUN_SRC}/api/schema.zig"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/node_modules/.bin/peechy"
"--schema" "${BUN_SRC}/api/schema.peechy"
"--esm" "${BUN_SRC}/api/schema.js"
"--ts" "${BUN_SRC}/api/schema.d.ts"
"--zig" "${BUN_SRC}/api/schema.zig"
COMMAND "${ZIG_COMPILER}" "fmt" "src/api/schema.zig"
COMMAND "${PRETTIER}" "--config=.prettierrc.cjs" "--write" "src/api/schema.js" "src/api/schema.d.ts"
DEPENDS "${BUN_SRC}/api/schema.peechy"
COMMENT "Building schema"
)
add_custom_command(
OUTPUT "${BUN_SRC}/analytics/analytics_schema.zig"
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/node_modules/.bin/peechy"
"--schema" "${BUN_SRC}/analytics/schema.peechy"
"--zig" "${BUN_SRC}/analytics/analytics_schema.zig"
COMMAND "${ZIG_COMPILER}" "fmt" "${BUN_SRC}/analytics/analytics_schema.zig"
DEPENDS "${BUN_SRC}/api/schema.peechy"
COMMENT "Building analytics_schema.zig"
)
# --- Zig Object ---
file(GLOB ZIG_FILES
"${BUN_SRC}/*.zig"
"${BUN_SRC}/**/*.zig"
"${BUN_SRC}/**/**/*.zig"
"${BUN_SRC}/**/**/**/*.zig"
)
if(DEBUG)
set(BUN_ZIG_OBJ "${BUN_WORKDIR}/CMakeFiles/bun-debug.o")
else()
set(BUN_ZIG_OBJ "${BUN_WORKDIR}/CMakeFiles/bun.o")
endif()
add_custom_command(
OUTPUT "${BUN_ZIG_OBJ}"
COMMAND
"${ZIG_COMPILER}" "build" "obj"
"-Doutput-dir=${BUN_WORKDIR}/CMakeFiles"
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
"-Dversion=${Bun_VERSION}"
"-Dcanary=$<IF:$<BOOL:${CANARY}>,true,false>"
"-Doptimize=${ZIG_OPTIMIZE}"
"-Dcpu=${CPU_TARGET}"
"-Dtarget=${ZIG_TARGET}"
DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
"${ZIG_FILES}"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig"
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_IDENTIFIER_CACHE_OUT}"
"${BUN_SRC}/api/schema.zig"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Building zig code"
VERBATIM
# This is here to show Zig's progress indicator
USES_TERMINAL
)
set(BUN_EXTRA_SOURCES "")
if(WIN32)
set(BUN_EXTRA_SOURCES "${BUN_SRC}/bun.js/bindings/windows/musl-memmem.c")
include_directories("${BUN_SRC}/bun.js/bindings/windows")
endif()
# -- The Buntime™ ---
add_executable(
${bun}
${BUN_CPP}
${USOCKETS_FILES}
${BUN_ZIG_OBJ}
"${BUN_WORKDIR}/codegen/JSSink.cpp"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp"
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
"${BUN_ZIG_OBJ}"
"${BUN_EXTRA_SOURCES}"
)
set_target_properties(${bun} PROPERTIES
CXX_STANDARD 20
CXX_STANDARD_REQUIRED YES
CXX_EXTENSIONS YES
CXX_VISIBILITY_PRESET hidden
C_STANDARD 17
C_STANDARD_REQUIRED YES
VISIBILITY_INLINES_HIDDEN YES
)
# Set /subsystem:console on bun for windows
if(WIN32)
set_target_properties(${bun} PROPERTIES LINK_FLAGS " /SUBSYSTEM:CONSOLE ")
endif()
add_compile_definitions(
# TODO: are all of these variables strictly necessary?
"_HAS_EXCEPTIONS=0"
"LIBUS_USE_OPENSSL=1"
"UWS_HTTPRESPONSE_NO_WRITEMARK=1"
"LIBUS_USE_BORINGSSL=1"
"WITH_BORINGSSL=1"
"STATICALLY_LINKED_WITH_JavaScriptCore=1"
"STATICALLY_LINKED_WITH_WTF=1"
"STATICALLY_LINKED_WITH_BMALLOC=1"
"BUILDING_WITH_CMAKE=1"
"JSC_OBJC_API_ENABLED=0"
"BUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1"
"NAPI_EXPERIMENTAL=ON"
"NOMINMAX"
"IS_BUILD"
"BUILDING_JSCONLY__"
"ASSERT_ENABLED=$<IF:$<CONFIG:ASSERT_ENABLED>,1,0>"
"BUN_DYNAMIC_JS_LOAD_PATH=\"${BUN_WORKDIR}/js\""
)
if(NOT ASSERT_ENABLED)
add_compile_definitions("NDEBUG=1")
endif()
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/packages/
${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets
${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src
${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings
${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings/webcore
${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings/webcrypto
${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings/sqlite
${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/modules
${CMAKE_CURRENT_SOURCE_DIR}/src/js/builtins
${CMAKE_CURRENT_SOURCE_DIR}/src/napi
${CMAKE_CURRENT_SOURCE_DIR}/src/deps
${CMAKE_CURRENT_SOURCE_DIR}/src/deps/picohttpparser
${WEBKIT_INCLUDE_DIR}
"${BUN_WORKDIR}/codegen"
)
# --- clang and linker flags ---
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
if(NOT MSVC)
target_compile_options(${bun} PUBLIC -g3 -O1)
endif()
add_compile_definitions("BUN_DEBUG=1")
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if (MSVC)
target_compile_options(${bun} PUBLIC /O2)
else()
target_compile_options(${bun} PUBLIC -O3 -flto=full -emit-llvm)
endif()
endif()
if(NOT MSVC)
if(NOT CI)
target_compile_options(${bun} PRIVATE -fdiagnostics-color=always)
endif()
target_compile_options(${bun} PUBLIC
-march=${CPU_TARGET}
-mtune=${CPU_TARGET}
-fconstexpr-steps=1271242
-fconstexpr-depth=27
-fno-exceptions
-fvisibility=hidden
-fvisibility-inlines-hidden
-fno-rtti
-ferror-limit=${ERROR_LIMIT}
-fPIC
-fno-omit-frame-pointer
)
string(APPEND CMAKE_CXX_FLAGS " -std=c++2a ")
else() # MSVC
string(APPEND SUPPRESS_WARNING_NUMBERS
# JSC deletes operator delete to prevent accidental use
"/wd4291 "
# we use #pragma mark in some places
"/wd4068"
)
string(APPEND CMAKE_CXX_FLAGS " /EHsc /GR-")
string(APPEND CMAKE_C_FLAGS " /EHsc /GR- ${SUPPRESS_WARNING_NUMBERS} /experimental:c11atomics /std:c17")
string(APPEND CMAKE_CXX_FLAGS " /Zc:__cplusplus /Zc:inline /bigobj ${SUPPRESS_WARNING_NUMBERS}")
endif()
if(APPLE)
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
target_link_options(${bun} PUBLIC "-dead_strip")
target_link_options(${bun} PUBLIC "-dead_strip_dylibs")
target_link_options(${bun} PUBLIC "-exported_symbols_list" "${BUN_SRC}/symbols.txt")
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.txt")
target_link_options(${bun} PUBLIC "-fno-keep-static-consts")
target_link_libraries(${bun} PRIVATE "resolv")
endif()
if(UNIX AND NOT APPLE)
target_link_options(${bun} PUBLIC
"-static-libstdc++"
"-static-libgcc"
"-fuse-ld=lld"
"-Wl,-z,now"
"-Wl,--as-needed"
"-Wl,--gc-sections"
"-Wl,-z,stack-size=12800000"
"-Wl,--wrap=fcntl"
"-Wl,--wrap=fcntl64"
"-Wl,--wrap=stat64"
"-Wl,--wrap=pow"
"-Wl,--wrap=exp"
"-Wl,--wrap=log"
"-Wl,--wrap=log2"
"-Wl,--wrap=lstat"
"-Wl,--wrap=stat"
"-Wl,--wrap=fstat"
"-Wl,--wrap=fstatat"
"-Wl,--wrap=lstat64"
"-Wl,--wrap=stat64"
"-Wl,--wrap=fstat64"
"-Wl,--wrap=fstatat64"
"-Wl,--wrap=mknod"
"-Wl,--wrap=mknodat"
"-Wl,--wrap=statx "
"-Wl,--compress-debug-sections=zlib"
"-Bsymbolics-functions"
"-rdynamic"
"-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn"
"-Wl,--version-script=${BUN_SRC}/linker.lds"
)
target_link_libraries(${bun} PRIVATE "c")
target_link_libraries(${bun} PRIVATE "libatomic.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicudata.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicui18n.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicuuc.a")
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/linker.lds")
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.dyn")
endif()
if(WIN32)
add_compile_definitions(
"WIN32"
"_WINDOWS"
"_CRT_SECURE_NO_WARNINGS"
"WIN32_LEAN_AND_MEAN=1"
)
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
endif()
if(APPLE)
# TODO: a much better check can be done to find this path
find_path(
ICU4C_DIR NAMES lib/libicudata.a
PATHS ENV PATH /usr/local/opt/icu4c /opt/homebrew/opt/icu4c
)
find_path(
ICONV_DIR NAMES lib/libiconv.a
PATHS ENV PATH /usr/local/opt/libiconv /opt/homebrew/opt/libiconv
)
target_link_libraries(${bun} PRIVATE "icucore")
target_link_libraries(${bun} PRIVATE "${ICONV_DIR}/lib/libiconv.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicudata.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicui18n.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicuuc.a")
include_directories(${ICU4C_DIR}/include)
endif()
# --- Stripped Binary "bun" ---
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32)
add_custom_command(
TARGET ${bun}
POST_BUILD
COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/bun.dSYM ${BUN_WORKDIR}/${bun}
COMMENT "Stripping Symbols"
)
add_custom_command(
TARGET ${bun}
POST_BUILD
COMMAND ${STRIP} -s -x -S -o ${BUN_WORKDIR}/bun ${BUN_WORKDIR}/${bun}
COMMENT "Stripping Symbols"
)
endif()
# --- Dependencies ---
if(USE_CUSTOM_ZLIB AND (NOT WIN32))
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libz.a")
include_directories(${BUN_DEPS_DIR}/zlib/include)
else()
find_package(ZLIB REQUIRED)
target_link_libraries(${bun} PRIVATE ZLIB::ZLIB)
endif()
if(USE_CUSTOM_BORINGSSL)
include_directories(src/deps/boringssl/include)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/crypto.lib")
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/ssl.lib")
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/decrepit.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libcrypto.a")
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libssl.a")
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libdecrepit.a")
endif()
else()
include(FindBoringSSL)
FindBoringSSL(${bun})
endif()
if(USE_CUSTOM_LIBARCHIVE)
include_directories(src/deps/libarchive/include)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/archive.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libarchive.a")
endif()
else()
find_package(LibArchive REQUIRED)
target_link_libraries(${bun} PRIVATE LibArchive::LibArchive)
endif()
if(USE_CUSTOM_MIMALLOC)
include_directories(src/deps/mimalloc/include)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/mimalloc-static.lib")
elseif(APPLE)
# https://github.com/microsoft/mimalloc/issues/512
# Linking mimalloc via object file on macOS x64 can cause heap corruption
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libmimalloc.a")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libmimalloc.o")
endif()
else()
find_package(mimalloc REQUIRED)
target_link_libraries(${bun} PRIVATE mimalloc)
endif()
if(USE_CUSTOM_ZSTD)
include_directories(src/deps/zstd/include)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/zstd.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libzstd.a")
endif()
else()
find_package(zstd CONFIG REQUIRED)
target_link_libraries(${bun} PRIVATE zstd::libzstd)
endif()
if(USE_CUSTOM_CARES)
include_directories(src/deps/c-ares/include)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/cares.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libcares.a")
endif()
else()
find_package(c-ares CONFIG REQUIRED)
target_link_libraries(${bun} PRIVATE c-ares::cares)
endif()
if(USE_CUSTOM_BASE64)
include_directories(src/deps/base64/include)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/base64.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libbase64.a")
endif()
else()
find_package(base64 REQUIRED)
target_link_libraries(${bun} PRIVATE base64::base64)
endif()
if(NOT WIN32)
if (USE_CUSTOM_TINYCC)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/libtcc.a")
else()
find_package(tinycc REQUIRED)
target_link_libraries(${bun} PRIVATE tinycc::tinycc)
endif()
endif()
if(USE_CUSTOM_LOLHTML)
if (WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/lolhtml.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/liblolhtml.a")
endif()
else()
find_package(lolhtml REQUIRED)
target_link_libraries(${bun} PRIVATE lolhtml::lolhtml)
endif()
if(WIN32)
if (USE_CUSTOM_LIBUV)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/uv.lib")
else()
find_package(libuv CONFIG REQUIRED)
target_link_libraries(${bun} PRIVATE $<IF:$<TARGET_EXISTS:libuv::uv_a>,libuv::uv_a,libuv::uv>)
endif()
message(STATUS "Found libuv: ${libuv_LIBRARIES}")
endif()
if(USE_STATIC_SQLITE)
add_library(sqlite3 STATIC src/bun.js/bindings/sqlite/sqlite3.c)
target_include_directories(sqlite3 PUBLIC src/bun.js/bindings/sqlite)
target_link_libraries(${bun} PRIVATE sqlite3)
message(STATUS "Using static sqlite3")
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0")
else()
message(STATUS "Using dynamicly linked sqlite3")
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
endif()
if(NOT MSVC)
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libWTF.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
else()
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/WTF.lib")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/JavaScriptCore.lib")
if (WIN32)
string (APPEND CMAKE_CXX_FLAGS
" /external:anglebrackets /Gs- /Zi"
)
string (APPEND CMAKE_FLAGS
" /external:anglebrackets /Gs- /Zi"
)
set_target_properties(${bun} PROPERTIES LINK_FLAGS " /SUBSYSTEM:CONSOLE /STACK:4194304,2097152")
endif()
if (DEFINED ENV{VCPKG_ROOT})
include_directories($ENV{VCPKG_ROOT}/installed/x64-windows/include)
endif()
# include_directories(C:/Users/windo/Build/WebKit/WebKitBuild/WTF/DerivedSources)
# include_directories(C:/Users/windo/Build/WebKit/WebKitBuild/WTF/Headers)
target_include_directories(${bun} PUBLIC C:/Users/windo/Code/WebKit/WebKitLibraries/win/include)
target_link_directories(${bun} PUBLIC C:/Users/windo/Code/WebKit/WebKitLibraries/win/lib64)
target_link_directories(${bun} PUBLIC C:/Users/windo/Code/lib64)
target_link_libraries(${bun} PUBLIC icuuc icudt icutu icuio icuin icutest)
target_link_libraries(${bun} PUBLIC winmm ws2_32 bcrypt ntdll kernel32 shell32 shlwapi advapi32 vcruntime ucrt legacy_stdio_definitions)
endif()


@@ -10,7 +10,7 @@ Today (February 2023), Bun's codebase has five distinct parts:
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
- JavaScript runtime bindings ([`src/bun.js/bindings/**/*.cpp`](src/bun.js/bindings/))
- JavaScript runtime bindings ([`src/bun.zig/bindings/**/*.cpp`](src/bun.zig/bindings/))
- Package manager ([`src/install/`](src/install/))
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))
@@ -47,18 +47,34 @@ TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and
Copy from examples like `Subprocess` or `Response`.
### ESM Modules and Builtins JS
### ESM modules
Bun implements ESM modules in a mix of native code and JavaScript.
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
Builtin modules in Bun are located in [`src/js`](src/js/). These files are transpiled and support a JavaScriptCore-only syntax for internal slots, which is explained further in [`src/js/README.md`](src/js/README.md).
Native C++ modules are in `src/bun.js/modules/`.
The ESM modules in Bun are located in [`src/bun.js/*.exports.js`](src/bun.js/). Unlike other code in Bun, these files are NOT transpiled; they are loaded directly into the JavaScriptCore VM. That means `require` does not work in these files. Instead, you must use `import.meta.require`, or ideally avoid requiring/importing other files at all.
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
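For illustration, here is a minimal hypothetical sketch of such a file (the file name, class, and helper are made up; only the `import.meta.require` pattern comes from the paragraph above):
```js
// Hypothetical example in the style of a src/bun.js/*.exports.js module.
// These files are loaded directly into JavaScriptCore without transpilation,
// so CommonJS `require` is unavailable; `import.meta.require` is used instead.
const { EventEmitter } = import.meta.require("node:events");

export class ExampleEmitter extends EventEmitter {
  // Purely illustrative helper: emit asynchronously on the microtask queue.
  emitLater(event, ...args) {
    queueMicrotask(() => this.emit(event, ...args));
  }
}
```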
### JavaScript Builtins
TODO: update this with the new build process that uses TypeScript and `$` instead of `@`.
JavaScript builtins are located in [`src/js/builtins/*.ts`](src/js/builtins).
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except that if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
They cannot use or reference ESM modules. The files that end with `*Internals.js` are automatically loaded globally. Most usage of internals right now is in the stream implementations (which share a lot of code with Safari/WebKit) and `ImportMetaObject` (which is how `require` is implemented in the runtime).
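As a hypothetical illustration of that syntax (the function itself is made up; the `@Array` internal-slot usage mirrors the example in the paragraph above):
```js
// Hypothetical builtin written in the JavaScriptCore builtin dialect described above.
// `@Array` resolves to the internal slot for the original Array constructor,
// so user code that overwrites the global `Array` cannot affect this function.
function createFixedList(length)
{
    "use strict";
    return new @Array(length);
}
```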
To regenerate the builtins:
```sh
make clean-bindings && make generate-builtins && make bindings -j10
```
It is recommended that you have ccache installed or else you will spend a lot of time waiting for the bindings to compile.
### Memory management in Bun's JavaScript runtime
TODO: fill this out (for now, use `JSC.Strong` in most cases)


@@ -1,90 +1,663 @@
FROM bitnami/minideb:bullseye as base
ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG CPU_TARGET=native
ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=may20
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG CLANG_VERSION="16"
ARG NODE_VERSION="20"
ARG ZIG_VERSION="0.12.0-dev.1114+e8f3c4c4b"
ARG DEBIAN_FRONTEND="noninteractive"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.6
RUN apt-get update -y \
&& install_packages \
ca-certificates \
curl \
gnupg \
&& echo "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${CLANG_VERSION} main" > /etc/apt/sources.list.d/llvm.list \
&& echo "deb-src https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${CLANG_VERSION} main" >> /etc/apt/sources.list.d/llvm.list \
&& curl -fsSL "https://apt.llvm.org/llvm-snapshot.gpg.key" | apt-key add - \
&& echo "deb https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" > /etc/apt/sources.list.d/nodesource.list \
&& curl -fsSL "https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key" | apt-key add - \
&& echo "deb https://apt.kitware.com/ubuntu/ focal main" > /etc/apt/sources.list.d/kitware.list \
&& curl -fsSL "https://apt.kitware.com/keys/kitware-archive-latest.asc" | apt-key add - \
&& install_packages \
wget \
bash \
lsb-release \
software-properties-common \
build-essential \
autoconf \
automake \
libtool \
pkg-config \
clang-${CLANG_VERSION} \
lld-${CLANG_VERSION} \
lldb-${CLANG_VERSION} \
clangd-${CLANG_VERSION} \
make \
cmake \
ccache \
ninja-build \
file \
gnupg \
libc-dev \
libxml2 \
libxml2-dev \
xz-utils \
libtcc-dev \
git \
tar \
rsync \
gzip \
unzip \
perl \
python3 \
ruby \
golang \
nodejs \
&& ln -s /usr/bin/clang-${CLANG_VERSION} /usr/bin/clang \
&& ln -s /usr/bin/clang++-${CLANG_VERSION} /usr/bin/clang++ \
&& ln -s /usr/bin/lld-${CLANG_VERSION} /usr/bin/lld \
&& ln -s /usr/bin/lldb-${CLANG_VERSION} /usr/bin/lldb \
&& ln -s /usr/bin/clangd-${CLANG_VERSION} /usr/bin/clangd \
&& ln -s /usr/bin/llvm-ar-${CLANG_VERSION} /usr/bin/llvm-ar \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) variant="x86_64";; \
arm64) variant="aarch64";; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
esac \
&& echo "https://ziglang.org/builds/zig-linux-${variant}-${ZIG_VERSION}.tar.xz" \
&& curl -fsSL "https://ziglang.org/builds/zig-linux-${variant}-${ZIG_VERSION}.tar.xz" | tar xJ --strip-components=1 \
&& mv zig /usr/bin/zig \
&& curl "https://sh.rustup.rs" -sSf | sh -s -- -y \
&& mv ${HOME}/.cargo/bin/* /usr/bin/ \
&& npm install -g bun esbuild
FROM bitnami/minideb:bullseye as bun-base
ARG CXX="clang++-${CLANG_VERSION}"
ARG CC="clang-${CLANG_VERSION}"
ARG LD="lld-${CLANG_VERSION}"
ARG AR="/usr/bin/llvm-ar-${CLANG_VERSION}"
RUN install_packages ca-certificates curl wget lsb-release software-properties-common gnupg gnupg1 gnupg2
COPY package.json package.json
COPY Makefile Makefile
COPY CMakeLists.txt CMakeLists.txt
COPY src/ src/
COPY packages/bun-usockets/ packages/bun-usockets/
COPY packages/bun-uws/ packages/bun-uws/
COPY .scripts/ .scripts/
COPY .build/ .build/
COPY *.zig ./
RUN wget https://apt.llvm.org/llvm.sh && \
chmod +x llvm.sh && \
./llvm.sh 15
RUN ./.build/base64.bash
RUN install_packages \
cmake \
curl \
file \
git \
gnupg \
libc-dev \
libxml2 \
libxml2-dev \
make \
ninja-build \
perl \
python3 \
rsync \
ruby \
unzip \
xz-utils \
bash tar gzip ccache
ENV CXX=clang++-15
ENV CC=clang-15
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
install_packages nodejs && \
npm install -g esbuild
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_URL
ARG ZIG_URL
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ENV WEBKIT_OUT_DIR=${WEBKIT_DIR}
ENV BUILDARCH=${BUILDARCH}
ENV AR=/usr/bin/llvm-ar-15
ENV ZIG "${ZIG_PATH}/zig"
ENV PATH="$ZIG/bin:$PATH"
ENV LD=lld-15
RUN mkdir -p $BUN_DIR $BUN_DEPS_OUT_DIR
FROM bun-base as bun-base-with-zig-and-webkit
WORKDIR $GITHUB_WORKSPACE
ADD $ZIG_URL .
RUN tar xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && mv ${ZIG_FOLDERNAME} zig;
WORKDIR $GITHUB_WORKSPACE
ARG GITHUB_WORKSPACE
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_URL
ARG ZIG_URL
ARG WEBKIT_BASENAME
ADD ${WEBKIT_URL} .
RUN mkdir -p ${WEBKIT_DIR} && cd ${GITHUB_WORKSPACE} && \
gunzip ${WEBKIT_BASENAME}.tar.gz && tar -xf ${WEBKIT_BASENAME}.tar && \
cat ${WEBKIT_DIR}/include/cmakeconfig.h > /dev/null
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/oven-sh/bun
FROM bun-base as c-ares
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make c-ares && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
FROM bun-base as lolhtml
RUN install_packages build-essential && curl https://sh.rustup.rs -sSf | sh -s -- -y
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-15) && cd ${BUN_DIR} && \
make lolhtml && rm -rf src/deps/lol-html Makefile
FROM bun-base as mimalloc
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
make mimalloc && rm -rf src/deps/mimalloc Makefile
FROM bun-base as zlib
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
make zlib && rm -rf src/deps/zlib Makefile
FROM bun-base as libarchive
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages autoconf automake libtool pkg-config
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
ENV CCACHE_DIR=/ccache
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
make libarchive && rm -rf src/deps/libarchive Makefile
FROM bun-base as tinycc
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages libtcc-dev && cp /usr/lib/$(uname -m)-linux-gnu/libtcc.a ${BUN_DEPS_OUT_DIR}
FROM bun-base as boringssl
RUN install_packages golang
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && make boringssl && rm -rf src/deps/boringssl Makefile
FROM bun-base as uws
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/uws ${BUN_DIR}/src/deps/uws
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
COPY src/deps/libuwsockets.cpp ${BUN_DIR}/src/deps/libuwsockets.cpp
COPY src/deps/_libusockets.h ${BUN_DIR}/src/deps/_libusockets.h
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make uws && rm -rf src/deps/uws Makefile
FROM bun-base as picohttp
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/picohttpparser ${BUN_DIR}/src/deps/picohttpparser
COPY src/deps/*.c ${BUN_DIR}/src/deps/
COPY src/deps/*.h ${BUN_DIR}/src/deps/
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make picohttp
FROM bun-base-with-zig-and-webkit as identifier_cache
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/js_lexer/identifier_data.zig ${BUN_DIR}/src/js_lexer/identifier_data.zig
COPY src/js_lexer/identifier_cache.zig ${BUN_DIR}/src/js_lexer/identifier_cache.zig
RUN cd $BUN_DIR && \
make identifier-cache && rm -rf zig-cache Makefile
FROM bun-base-with-zig-and-webkit as node_fallbacks
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/node-fallbacks ${BUN_DIR}/src/node-fallbacks
RUN cd $BUN_DIR && \
make node-fallbacks && rm -rf src/node-fallbacks/node_modules Makefile
FROM bun-base-with-zig-and-webkit as prepare_release
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY ./root.zig ${BUN_DIR}/root.zig
COPY ./src ${BUN_DIR}/src
COPY ./build.zig ${BUN_DIR}/build.zig
COPY ./completions ${BUN_DIR}/completions
COPY ./packages ${BUN_DIR}/packages
COPY ./src/build-id ${BUN_DIR}/src/build-id
COPY ./package.json ${BUN_DIR}/package.json
COPY ./misctools ${BUN_DIR}/misctools
COPY Makefile ${BUN_DIR}/Makefile
FROM prepare_release as compile_release_obj
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
ARG ARCH
ARG TRIPLET
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ARG GIT_SHA
ARG BUN_BASE_VERSION
ENV BUN_BASE_VERSION=${BUN_BASE_VERSION}
ENV GIT_SHA=${GIT_SHA}
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
COPY ./src/build-id ${BUN_DIR}/src/build-id
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
mkdir -p $BUN_RELEASE_DIR && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Doptimize=ReleaseFast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
FROM scratch as build_release_obj
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG GIT_SHA
ARG TRIPLET
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY --from=compile_release_obj /tmp/bun-${TRIPLET}-${GIT_SHA}/*.o /
FROM prepare_release as compile_cpp
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
# Required for webcrypto bindings
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
make release-bindings -j10 && mv src/bun.js/bindings-obj/* /tmp
FROM bun-base as sqlite
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
FROM scratch as build_release_cpp
COPY --from=compile_cpp /tmp/*.o /
FROM prepare_release as build_release
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=build_release_obj /*.o /tmp
COPY --from=build_release_cpp /*.o ${BUN_DIR}/src/bun.js/bindings-obj/
COPY --from=build_release_cpp /*.a ${BUN_DEPS_OUT_DIR}/
RUN cd $BUN_DIR && mkdir -p ${BUN_RELEASE_DIR} && make bun-relink copy-to-bun-release-dir && \
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig ${BUN_DIR}/packages
FROM scratch as artifact
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /bun
COPY --from=build_release ${BUN_RELEASE_DIR}/bun-profile /bun-profile
COPY --from=build_release ${BUN_DEPS_OUT_DIR}/* /bun-dependencies
COPY --from=build_release_obj /*.o /bun-obj
FROM prepare_release as build_unit
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
WORKDIR $BUN_DIR
ENV PATH "$ZIG_PATH:$PATH"
ENV LIB_ICU_PATH "${WEBKIT_DIR}/lib"
CMD make headers \
api \
analytics \
bun_error \
fallback_decoder \
bindings -j10 && \
make \
run-all-unit-tests
# FROM bun-test-base as test_base
# ARG DEBIAN_FRONTEND=noninteractive
# ARG GITHUB_WORKSPACE=/build
# ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
# # Directory extracts to "bun-webkit"
# ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
# ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
# ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
# ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
# ARG BUILDARCH=amd64
# RUN groupadd -r chromium && useradd -d ${BUN_DIR} -M -r -g chromium -G audio,video chromium \
# && mkdir -p /home/chromium/Downloads && chown -R chromium:chromium /home/chromium
# USER chromium
# WORKDIR $BUN_DIR
# ENV NPM_CLIENT bun
# ENV PATH "${BUN_DIR}/packages/bun-linux-x64:${BUN_DIR}/packages/bun-linux-aarch64:$PATH"
# ENV CI 1
# ENV BROWSER_EXECUTABLE /usr/bin/chromium
# COPY ./test ${BUN_DIR}/test
# COPY Makefile ${BUN_DIR}/Makefile
# COPY package.json ${BUN_DIR}/package.json
# COPY .docker/run-test.sh ${BUN_DIR}/run-test.sh
# COPY ./bun.lockb ${BUN_DIR}/bun.lockb
# # # We don't want to worry about architecture differences in this image
# COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-aarch64/bun
# COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-x64/bun
# USER root
# RUN chgrp -R chromium ${BUN_DIR} && chmod g+rwx ${BUN_DIR} && chown -R chromium:chromium ${BUN_DIR}
# USER chromium
# CMD [ "bash", "run-test.sh" ]
# FROM release

Makefile

@@ -6,6 +6,8 @@ BUN_AUTO_UPDATER_REPO = Jarred-Sumner/bun-releases-for-updater
CMAKE_CXX_COMPILER_LAUNCHER_FLAG :=
# 'make' command will trigger the help target
.DEFAULT_GOAL := help
@@ -18,7 +20,7 @@ CPU_TARGET ?= native
MARCH_NATIVE = -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH =
MMD_IF_LOCAL =
MMD_IF_LOCAL =
DEFAULT_MIN_MACOS_VERSION=
ARCH_NAME :=
DOCKER_BUILDARCH =
@@ -38,7 +40,8 @@ NATIVE_OR_OLD_MARCH = -march=nehalem
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 1.0
BUN_BASE_VERSION = 0.6
CI ?= false
AR=
@@ -49,8 +52,6 @@ endif
BUN_OR_NODE = $(shell which bun 2>/dev/null || which node 2>/dev/null)
CXX_VERSION=c++2a
TRIPLET = $(OS_NAME)-$(ARCH_NAME)
PACKAGE_NAME = bun-$(TRIPLET)
@@ -65,7 +66,7 @@ PACKAGE_JSON_VERSION = $(BUN_BASE_VERSION).$(BUILD_ID)
BUN_BUILD_TAG = bun-v$(PACKAGE_JSON_VERSION)
BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
PRETTIER ?= $(shell which prettier 2>/dev/null || echo "./node_modules/.bin/prettier")
ESBUILD = "$(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")"
ESBUILD = $(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")
DSYMUTIL ?= $(shell which dsymutil 2>/dev/null || which dsymutil-15 2>/dev/null)
WEBKIT_DIR ?= $(realpath src/bun.js/WebKit)
WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release
@@ -73,7 +74,7 @@ WEBKIT_DEBUG_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Debug
WEBKIT_RELEASE_DIR_LTO ?= $(WEBKIT_DIR)/WebKitBuild/ReleaseLTO
NPM_CLIENT = "$(shell which bun 2>/dev/null || which npm 2>/dev/null)"
NPM_CLIENT ?= $(shell which bun 2>/dev/null || which npm 2>/dev/null)
ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make sure zig is in PATH. Or set ZIG=/path/to-zig-executable")
# We must use the same compiler version for the JavaScriptCore bindings and JavaScriptCore
@@ -81,9 +82,9 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-16 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-16 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-16 2>/dev/null || which clang-format 2>/dev/null)
REAL_CC = $(shell which clang-15 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-15 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-15 2>/dev/null || which clang-format 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -107,14 +108,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm@16)
LLVM_PREFIX = $(shell brew --prefix llvm@15)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@16' or set LLVM_PREFIX=/path/to/llvm")
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@15' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -154,7 +155,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
-DCMAKE_RANLIB=$(which llvm-15-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
@@ -176,7 +177,7 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
AR = $(shell which llvm-ar-16 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
AR = $(shell which llvm-ar-15 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
@@ -186,6 +187,11 @@ BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-excep
BUN_TMP_DIR := /tmp/make-bun
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)
DEFAULT_USE_BMALLOC := 1
USE_BMALLOC ?= DEFAULT_USE_BMALLOC
# Set via postinstall
ifeq (,$(realpath $(JSC_BASE_DIR)))
JSC_BASE_DIR = $(realpath $(firstword $(wildcard bun-webkit)))
@@ -268,7 +274,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-16 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-15 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
endif
@@ -343,10 +349,10 @@ LINUX_INCLUDE_DIRS := $(ALL_JSC_INCLUDE_DIRS) \
-I$(ZLIB_INCLUDE_DIR)
UWS_INCLUDE_DIR := -I$(BUN_DIR)/packages/bun-usockets/src -I$(BUN_DIR)/packages -I$(BUN_DEPS_DIR)
UWS_INCLUDE_DIR := -I$(BUN_DEPS_DIR)/uws/uSockets/src -I$(BUN_DEPS_DIR)/uws/src -I$(BUN_DEPS_DIR)
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include -Isrc/bun.js/modules
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
ifeq ($(OS_NAME),linux)
@@ -374,7 +380,9 @@ ICU_FLAGS ?=
# Ideally, we could just look up the linker search paths
ifeq ($(OS_NAME),linux)
LIB_ICU_PATH ?= $(JSC_LIB)
ICU_FLAGS += $(LIB_ICU_PATH)/libicuuc.a $(LIB_ICU_PATH)/libicudata.a $(LIB_ICU_PATH)/libicui18n.a
ICU_FLAGS += $(LIB_ICU_PATH)/libicuuc.a $(LIB_ICU_PATH)/libicudata.a $(LIB_ICU_PATH)/libicui18n.a
else
LIB_ICU_PATH ?= $(BUN_DEPS_DIR)
endif
ifeq ($(OS_NAME),darwin)
@@ -393,7 +401,6 @@ CLANG_FLAGS = $(INCLUDE_DIRS) \
-DSTATICALLY_LINKED_WITH_BMALLOC=1 \
-DBUILDING_WITH_CMAKE=1 \
-DBUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1 \
-DNAPI_EXPERIMENTAL=ON \
-DNDEBUG=1 \
-DNOMINMAX \
-DIS_BUILD \
@@ -446,8 +453,7 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
-ldecrepit \
-lssl \
-lcrypto \
-llolhtml \
-lbase64
-llolhtml
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-larchive \
@@ -542,17 +548,23 @@ tinycc:
cd $(TINYCC_DIR) && \
make clean && \
AR=$(AR) $(CCACHE_CC_FLAG) CFLAGS='$(CFLAGS_WITHOUT_MARCH) $(NATIVE_OR_OLD_MARCH) -mtune=native $(TINYCC_CFLAGS)' ./configure --enable-static --cc=$(CCACHE_CC_OR_CC) --ar=$(AR) --config-predefs=yes && \
make libtcc.a -j10 && \
make -j10 && \
cp $(TINYCC_DIR)/*.a $(BUN_DEPS_OUT_DIR)
PYTHON=$(shell which python 2>/dev/null || which python3 2>/dev/null || which python2 2>/dev/null)
.PHONY: esm
js: # to rebundle js (rebuilding binary not needed to reload js code)
NODE_ENV=production bun src/js/_codegen/index.ts
.PHONY: builtins
builtins:
NODE_ENV=production bun src/js/builtins/codegen/index.ts --minify
.PHONY: esm
esm:
NODE_ENV=production bun src/js/build-esm.ts
.PHONY: generate-builtins
generate-builtins: builtins
esm-debug:
BUN_DEBUG_QUIET_LOGS=1 NODE_ENV=production bun-debug src/js/build-esm.ts
BUN_TYPES_REPO_PATH ?= $(realpath packages/bun-types)
@@ -653,10 +665,10 @@ else
PKGNAME_NINJA := ninja-build
endif
.PHONY: assert-deps
assert-deps:
.PHONY: require
require:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@16"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@15"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@@ -666,24 +678,10 @@ assert-deps:
@which $(LIBTOOL) > /dev/null || (echo -e "ERROR: libtool is required. Install with:\n\n $(POSIX_PKG_MANAGER) install libtool"; exit 1)
@which ninja > /dev/null || (echo -e "ERROR: Ninja is required. Install with:\n\n $(POSIX_PKG_MANAGER) install $(PKGNAME_NINJA)"; exit 1)
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
@which rustc > /dev/null || (echo -e "ERROR: rustc is required."; exit 1)
@which cargo > /dev/null || (echo -e "ERROR: cargo is required."; exit 1)
@test $(shell cargo --version | awk '{print $$2}' | cut -d. -f2) -gt 57 || (echo -e "ERROR: cargo version must be at least 1.57."; exit 1)
@echo "You have the dependencies installed! Woo"
# The following allows you to run `make submodule` to update or init submodules, but we will exclude WebKit
# unless you explicitly clone it yourself (a huge download).
SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}')
ifeq ("$(wildcard src/bun.js/WebKit/.git)", "")
SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES))
endif
.PHONY: init-submodules
init-submodules: submodule # (backwards-compatibility alias)
.PHONY: submodule
submodule: ## to init or update all submodules
git submodule update --init --recursive --progress --depth=1 --checkout $(SUBMODULE_NAMES)
init-submodules:
git submodule update --init --recursive --progress --depth=1 --checkout
.PHONY: build-obj
build-obj:
@@ -699,46 +697,44 @@ dev-build-obj-wasm:
.PHONY: dev-wasm
dev-wasm: dev-build-obj-wasm
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
-g2 -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
packages/debug-bun-freestanding-wasm32/bun-wasm.o --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/debug-bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/debug-bun-freestanding-wasm32/bun-wasm.wasm
cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
.PHONY: build-obj-wasm
build-obj-wasm:
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
-s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
.PHONY: build-obj-wasm-small
build-obj-wasm-small:
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
-Oz -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
$(ZIG) build bun-wasm -Doptimize=ReleaseSmall -Dtarget=wasm32-freestanding
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/bun-freestanding-wasm32/bun-wasm.o -Oz --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
.PHONY: wasm
wasm: api mimalloc-wasm build-obj-wasm-small
@rm -rf packages/bun-wasm/*.{d.ts,d.cts,d.mts,js,wasm,cjs,mjs,tsbuildinfo}
wasm: api build-obj-wasm-small
@rm -rf packages/bun-wasm/*.{d.ts,js,wasm,cjs,mjs,tsbuildinfo}
@cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
@cp src/api/schema.d.ts packages/bun-wasm/schema.d.ts
@cp src/api/schema.js packages/bun-wasm/schema.js
@cd packages/bun-wasm && $(NPM_CLIENT) run tsc -- -p .
@cp packages/bun-wasm/index.d.ts packages/bun-wasm/index.d.cts
@mv packages/bun-wasm/index.d.ts packages/bun-wasm/index.d.mts
@bun build --sourcemap=external --external=fs --outdir=packages/bun-wasm --target=browser --minify ./packages/bun-wasm/index.ts
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=esm --minify 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.mjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.mjs.map
@$(ESBUILD) --sourcemap=external --external:fs --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.cjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.cjs.map
@rm -rf packages/bun-wasm/*.tsbuildinfo
@@ -752,17 +748,17 @@ build-obj-safe:
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions -fno-rtti
UWS_LDFLAGS = -I$(BUN_DEPS_DIR)/boringssl/include -I$(ZLIB_INCLUDE_DIR)
USOCKETS_DIR = $(BUN_DIR)/packages/bun-usockets
USOCKETS_SRC_DIR = $(USOCKETS_DIR)/src
USOCKETS_DIR = $(BUN_DEPS_DIR)/uws/uSockets/
USOCKETS_SRC_DIR = $(BUN_DEPS_DIR)/uws/uSockets/src/
usockets:
rm -rf $(USOCKETS_DIR)/*.i $(USOCKETS_DIR)/*.bc $(USOCKETS_DIR)/*.o $(USOCKETS_DIR)/*.s $(USOCKETS_DIR)/*.ii $(USOCKETS_DIR)/*.s $(BUN_DEPS_OUT_DIR)/libusockets.a
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
rm -rf $(BUN_DEPS_DIR)/uws/uSockets/*.o $(BUN_DEPS_DIR)/uws/uSockets/**/*.o $(BUN_DEPS_DIR)/uws/uSockets/*.a $(BUN_DEPS_DIR)/uws/uSockets/*.bc
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
cd $(USOCKETS_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/libusockets.a $(USOCKETS_DIR)/*.{o,bc}
uws: usockets
$(CXX_WITH_CCACHE) -O2 $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(USOCKETS_SRC_DIR) $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
$(CXX_WITH_CCACHE) -O2 $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(BUN_DEPS_DIR)/uws/uSockets/src $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
.PHONY: sign-macos-x64
sign-macos-x64:
@@ -803,7 +799,7 @@ fmt-cpp:
.PHONY: fmt-zig
fmt-zig:
cd src && $(ZIG) fmt **/*.zig
cd src && zig fmt **/*.zig
.PHONY: fmt
fmt: fmt-cpp fmt-zig
@@ -892,8 +888,7 @@ check-glibc-version-dependency:
ifeq ($(OS_NAME),darwin)
zig-win32:
$(ZIG) build -Dtarget=x86_64-windows
# Hardened runtime will not work with debugging
bun-codesign-debug:
@@ -937,7 +932,6 @@ headers:
$(ZIG) translate-c src/bun.js/bindings/headers.h > src/bun.js/bindings/headers.zig
$(BUN_OR_NODE) misctools/headers-cleaner.js
$(ZIG) fmt src/bun.js/bindings/headers.zig
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedCode.cpp
.PHONY: jsc-bindings-headers
jsc-bindings-headers: headers
@@ -1082,30 +1076,17 @@ test/wiptest/run: test/wiptest/run.o
release-bin-dir:
echo $(PACKAGE_DIR)
.PHONY: dev-obj-track
dev-obj-track:
bun .scripts/make-dev-timer.ts $(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj-notrack
dev-obj-notrack:
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj
dev-obj:
ifeq ($(shell which bun),)
dev-obj : dev-obj-notrack
else
dev-obj : dev-obj-track
endif
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj-linux
dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
.PHONY: dev
dev: mkdir-dev esm dev-obj bun-link-lld-debug
mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)
@@ -1189,12 +1170,10 @@ jsc-build-mac-compile:
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DENABLE_FTL_JIT=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
@@ -1213,11 +1192,9 @@ jsc-build-mac-compile-lto:
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DCMAKE_C_FLAGS="-flto=full" \
-DCMAKE_CXX_FLAGS="-flto=full" \
-DENABLE_FTL_JIT=ON \
@@ -1242,8 +1219,6 @@ jsc-build-mac-compile-debug:
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
@@ -1262,13 +1237,11 @@ jsc-build-linux-compile-config:
cmake \
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-USE_VISIBILITY_ATTRIBUTE=1 \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-G Ninja \
@@ -1283,7 +1256,7 @@ jsc-build-linux-compile-config:
jsc-build-linux-compile-build:
mkdir -p $(WEBKIT_RELEASE_DIR) && \
cd $(WEBKIT_RELEASE_DIR) && \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects" \
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
@@ -1380,24 +1353,18 @@ mimalloc:
mimalloc-wasm:
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -GNinja -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=OFF .; emmake cmake --build .;
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
# alias for link, in case anyone still types that
.PHONY: bun-link-lld-debug
bun-link-lld-debug: link
.PHONY: link
link: ## link a debug build of bun
bun-link-lld-debug:
$(CXX) $(BUN_LLD_FLAGS_DEBUG) $(DEBUG_FLAGS) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
-W \
-o $(DEBUG_BIN)/bun-debug
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
link-no-jsc:
bun-link-lld-debug-no-jsc:
$(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
@@ -1474,17 +1441,17 @@ bun-relink: bun-relink-copy bun-link-lld-release bun-link-lld-release-dsym
bun-relink-fast: bun-relink-copy bun-link-lld-release-no-lto
wasm-return1:
$(ZIG) build-lib -OReleaseSmall test/bun.js/wasm-return-1-test.zig -femit-bin=test/bun.js/wasm-return-1-test.wasm -target wasm32-freestanding
zig build-lib -OReleaseSmall test/bun.js/wasm-return-1-test.zig -femit-bin=test/bun.js/wasm-return-1-test.wasm -target wasm32-freestanding
generate-classes:
bun src/codegen/generate-classes.ts
bun src/bun.js/scripts/generate-classes.ts
$(ZIG) fmt src/bun.js/bindings/generated_classes.zig
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
generate-sink:
bun src/codegen/generate-jssink.js
bun src/bun.js/scripts/generate-jssink.js
$(CLANG_FORMAT) -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
./src/bun.js/scripts/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(WEBKIT_DIR)/Source/JavaScriptCore/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/namespace JSC {//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/} \/\/ namespace JSC//' src/bun.js/bindings/JSSinkLookupTable.h
@@ -1510,7 +1477,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1521,7 +1488,7 @@ $(OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1532,7 +1499,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1543,7 +1510,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1554,7 +1521,7 @@ $(OBJ_DIR)/%.o: src/io/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1565,7 +1532,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1576,7 +1543,7 @@ $(OBJ_DIR)/%.o: src/js/out/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1588,7 +1555,7 @@ $(OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1602,7 +1569,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
-DBUN_DEBUG \
$(EMIT_LLVM_FOR_DEBUG) \
-g3 -c -o $@ $<
@@ -1617,7 +1584,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1630,7 +1597,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/io/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
-DBUN_DEBUG \
$(EMIT_LLVM_FOR_DEBUG) \
-g3 -c -o $@ $<
@@ -1646,7 +1613,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1661,7 +1628,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1676,7 +1643,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/js/out/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1689,7 +1656,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1704,7 +1671,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
-fno-exceptions \
-I$(SRC_DIR) \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1718,7 +1685,7 @@ sizegen:
# Linux uses bundled SQLite3
ifeq ($(OS_NAME),linux)
sqlite:
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
endif
picohttp:
@@ -1814,10 +1781,10 @@ endif
endif
.PHONY: build-unit
build-unit: # to build your unit tests
build-unit: ## to build your unit tests
@rm -rf zig-out/bin/$(testname)
@mkdir -p zig-out/bin
$(ZIG) test $(realpath $(testpath)) \
zig test $(realpath $(testpath)) \
$(testfilterflag) \
$(PACKAGE_MAP) \
--main-pkg-path $(BUN_DIR) \
@@ -1832,10 +1799,10 @@ build-unit: # to build your unit tests
cp zig-out/bin/$(testname) $(testbinpath)
.PHONY: run-all-unit-tests
run-all-unit-tests: # to run your unit tests
run-all-unit-tests: ## to run your unit tests
@rm -rf zig-out/bin/__main_test
@mkdir -p zig-out/bin
$(ZIG) test src/main.zig \
zig test src/main.zig \
$(PACKAGE_MAP) \
--main-pkg-path $(BUN_DIR) \
--test-no-exec \
@@ -1852,11 +1819,15 @@ run-all-unit-tests: # to run your unit tests
run-unit:
@zig-out/bin/$(testname) $(ZIG)
.PHONY: help
help: ## to print this help
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)
.PHONY: test
test: build-unit run-unit
.PHONY: integration-test-dev
integration-test-dev: # to run integration tests
integration-test-dev: ## to run integration tests
USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node test/scripts/browser.js
copy-install:
@@ -1870,10 +1841,6 @@ copy-to-bun-release-dir-bin:
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
.PHONY: base64
base64:
cd $(BUN_DEPS_DIR)/base64 && make clean && rm -rf CMakeCache.txt CMakeFiles && cmake $(CMAKE_FLAGS) . && make
cp $(BUN_DEPS_DIR)/base64/libbase64.a $(BUN_DEPS_OUT_DIR)/libbase64.a
.PHONY: cold-jsc-start
cold-jsc-start:
@@ -1883,7 +1850,7 @@ cold-jsc-start:
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(LIBICONV_PATH) \
$(DEFAULT_LINKER_FLAGS) \
$(PLATFORM_LINKER_FLAGS) \
@@ -1892,63 +1859,30 @@ cold-jsc-start:
misctools/cold-jsc-start.cpp -o cold-jsc-start
.PHONY: vendor-without-npm
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd
.PHONY: vendor-without-check
vendor-without-check: npm-install vendor-without-npm
.PHONY: vendor
vendor: assert-deps submodule vendor-without-check
vendor: require init-submodules vendor-without-check
.PHONY: vendor-dev
vendor-dev: assert-deps submodule npm-install-dev vendor-without-npm
vendor-dev: require init-submodules npm-install-dev vendor-without-npm
.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
.PHONY: static-hash-table
static-hash-table:
bun src/js/_codegen/static-hash-tables.ts
.PHONY: cpp
cpp: ## compile src/js/builtins + all c++ code then link
@make clean-bindings js
@make static-hash-table
.PHONY: regenerate-bindings
regenerate-bindings:
@make clean-bindings builtins
@make bindings -j$(CPU_COUNT)
@make link
.PHONY: cpp
cpp-no-link:
@make clean-bindings js
@make bindings -j$(CPU_COUNT)
.PHONY: zig
zig: ## compile zig code then link
@make mkdir-dev dev-obj link
.PHONY: zig-no-link
zig-no-link:
@make mkdir-dev dev-obj
.PHONY: dev
dev: # combo of `make cpp` and `make zig`
@make cpp-no-link zig-no-link -j2
@make link
.PHONY: setup
setup: vendor-dev identifier-cache clean-bindings
make jsc-check dev
make jsc-check
make bindings -j$(CPU_COUNT)
@echo ""
@echo "First build complete!"
@echo "\"bun-debug\" is available at $(DEBUG_BIN)/bun-debug"
@echo "Development environment setup complete"
@echo "Run \`make dev\` to build \`bun-debug\`"
@echo ""
.PHONY: help
help: ## to print this help
@echo "For detailed build instructions, see https://bun.sh/docs/project/contributing"
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)
print_linker_flags:
@echo $(CLANG_FLAGS)

View File

@@ -24,14 +24,14 @@
## What is Bun?
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
> **Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keeps tabs on future releases.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
```bash
bun run index.tsx # TS and JSX supported out-of-the-box
bun run index.tsx # TS and JSX supported out of the box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
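As a concrete illustration of that out-of-the-box TypeScript support, a file like the following (a hypothetical example, not one from the repository) runs directly with `bun run`, with no `tsc` or bundler step:

```ts
// hello.ts -- hypothetical example: Bun transpiles TypeScript on the fly,
// so `bun run hello.ts` executes this directly with no separate build step.
interface User {
  name: string;
  id: number;
}

const user: User = { name: "Ada", id: 1 };
console.log(`Hello, ${user.name} (#${user.id})!`);
```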
@@ -93,8 +93,7 @@ bun upgrade --canary
- [`bun run`](https://bun.sh/docs/cli/run)
- [`bun install`](https://bun.sh/docs/cli/install)
- [`bun test`](https://bun.sh/docs/cli/test)
- [`bun init`](https://bun.sh/docs/cli/init)
- [`bun create`](https://bun.sh/docs/cli/bun-create)
- [`bun create`](https://bun.sh/docs/cli/create)
- [`bunx`](https://bun.sh/docs/cli/bunx)
- Runtime
- [Runtime](https://bun.sh/docs/runtime/index)
@@ -124,6 +123,7 @@ bun upgrade --canary
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
- [Testing](https://bun.sh/docs/api/test)
- [Utils](https://bun.sh/docs/api/utils)
- [DNS](https://bun.sh/docs/api/dns)
- [Node-API](https://bun.sh/docs/api/node-api)
## Contributing

View File

@@ -1,12 +0,0 @@
# Security Policy
## Supported Versions
| Version | Supported |
| ------- | ------------------ |
| 1.x.x | :white_check_mark: |
## Reporting a Vulnerability
Report any discovered vulnerabilities to the Bun team by emailing `security@bun.sh`. Your report will be acknowledged within 5 days, and a team member will be assigned as the primary handler. To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue.

View File

@@ -1,30 +0,0 @@
// https://github.com/nodejs/node/issues/34493
import { AsyncLocalStorage } from "async_hooks";
const asyncLocalStorage = new AsyncLocalStorage();
// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
let fn = () => /test/.test("test");
let runWithExpiry = async (expiry, fn) => {
let iterations = 0;
while (Date.now() < expiry) {
await fn();
iterations++;
}
return iterations;
};
console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);
let withAls;
await asyncLocalStorage.run(123, async () => {
withAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withAls} iterations (with ALS enabled)`);
});
asyncLocalStorage.disable();
let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);
console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");

View File

@@ -3,6 +3,6 @@
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.7.0"
"bun-types": "^0.5.0"
}
}
}

View File

@@ -10,7 +10,7 @@ To run in Bun:
```bash
# so it doesn't run the vitest one
bun test expect-to-equal.test.js
bun wiptest expect-to-equal.test.js
```
To run in Jest:

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -125,30 +125,30 @@ pub fn main() anyerror!void {
);
};
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -112,30 +112,30 @@ pub fn main() anyerror!void {
\\
++ SIMULATE_LONG_FILE;
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -5,4 +5,4 @@
"jsx": "react-jsx",
"paths": {}
}
}
}

View File

@@ -1,33 +1,11 @@
# `install` benchmark
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine). The goal of this benchmark is to compare installation performance of Bun with other package managers _when caches are hot_.
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine)
### With lockfile, online mode
To run the benchmark with the standard "install" command for each package manager:
```sh
```
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```
### With lockfile, offline mode
Even though all packages are cached, some tools may hit the npm API during the version resolution step. (This is not the same as re-downloading a package.) To entirely avoid network calls, the other package managers require `--prefer-offline/--offline` flag. To run the benchmark using "offline" mode:
```sh
$ hyperfine --prepare 'rm -rf node_modules' --runs 1 'bun install' 'pnpm install --prefer-offline' 'yarn --offline' 'npm install --prefer-offline'
```
### Without lockfile, offline mode
To run the benchmark with offline mode but without lockfiles:
```sh
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 'rm bun.lockb && bun install' 'rm pnpm-lock.yaml && pnpm install --prefer-offline' 'rm yarn.lock && yarn --offline' 'rm package-lock.json && npm install --prefer-offline'
```
##
To check that the app is working as expected:
```

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("noop", function () {});
bench("async function(){}", async function () {});
@@ -6,30 +6,23 @@ bench("await 1", async function () {
return await 1;
});
if (typeof process !== "undefined") {
bench("process.nextTick x 100", async function () {
var remaining = 100;
var cb, promise;
promise = new Promise(resolve => {
cb = resolve;
});
for (let i = 0; i < 100; i++) {
process.nextTick(() => {
if (--remaining === 0) cb();
});
}
return promise;
});
bench("await 1 x 100", async function () {
for (let i = 0; i < 100; i++) await 1;
});
function callnextTick(resolve) {
process.nextTick(resolve);
}
function awaitNextTick() {
return new Promise(callnextTick);
}
bench("promise.nextTick", async function () {
return awaitNextTick();
});
bench("await new Promise(resolve => resolve())", async function () {
await new Promise(resolve => resolve());
});
bench("Promise.all(Array.from({length: 100}, () => new Promise((resolve) => resolve())))", async function () {
return Promise.all(Array.from({ length: 100 }, () => Promise.resolve(1)));
});
await run();

View File

@@ -1,29 +0,0 @@
import { bench, run } from "./runner.mjs";
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");
const uuid = crypto.randomBytes(16);
bench(`Buffer(${bigBuffer.byteLength}).toString('base64')`, () => {
return bigBuffer.toString("base64");
});
bench(`Buffer(${uuid.byteLength}).toString('base64')`, () => {
return uuid.toString("base64");
});
bench(`Buffer(${bigBuffer.byteLength}).toString('hex')`, () => {
return bigBuffer.toString("hex");
});
bench(`Buffer(${uuid.byteLength}).toString('hex')`, () => {
return uuid.toString("hex");
});
bench(`Buffer(${bigBuffer.byteLength}).toString('ascii')`, () => {
return bigBuffer.toString("ascii");
});
await run();

View File

@@ -1,3 +0,0 @@
import { cp } from "fs/promises";
await cp(process.argv[2], process.argv[3], { recursive: true });

View File

@@ -1,31 +0,0 @@
import { mkdirSync, writeFileSync } from "fs";
import { bench, run } from "./runner.mjs";
import { cp } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";
const hugeDirectory = (() => {
const root = join(tmpdir(), "huge");
const base = join(root, "directory", "for", "benchmarks", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
mkdirSync(base, {
recursive: true,
});
for (let i = 0; i < 1000; i++) {
writeFileSync(join(base, "file-" + i + ".txt"), "Hello, world! " + i);
}
return root;
})();
const hugeFilePath = join(tmpdir(), "huge-file-0.txt");
const hugeText = "Hello, world!".repeat(1000000);
writeFileSync(hugeFilePath, hugeText);
var hugeCopyI = 0;
bench("cp -r (1000 files)", async b => {
await cp(hugeDirectory, join(tmpdir(), "huge-copy" + hugeCopyI++), { recursive: true });
});
bench("cp 1 " + ((hugeText.length / 1024) | 0) + " KB file", async b => {
await cp(hugeFilePath, join(tmpdir(), "huge-file" + hugeCopyI++));
});
await run();

View File

@@ -1,6 +1,12 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import crypto from "node:crypto";
var crypto = globalThis.crypto;
if (!crypto) {
crypto = await import("node:crypto");
}
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);
@@ -16,8 +22,4 @@ bench("crypto.randomUUID()", () => {
return crypto.randomUUID()[2];
});
bench("crypto.randomInt()", () => {
return crypto.randomInt(0, 100);
});
await run();

View File

@@ -1,12 +0,0 @@
import { bench, run } from "./runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {
Error.captureStackTrace(err);
});
bench("Error.prototype.stack", () => {
new Error().stack;
});
await run();

View File

@@ -1,65 +0,0 @@
// This is a stress test of some internals in how Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
return 42;
},
set() {
throw new Error("bad");
},
configurable: true,
});
if (module.exports !== 42) throw new Error("bad");
if (!Object.getOwnPropertyDescriptor(module, "exports").get) throw new Error("bad");
});
bench("Object.defineProperty(module.exports = {})", () => {
Object.defineProperty(module, "exports", {
value: { abc: 123 },
});
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
bench("module.exports = {}", () => {
module.exports = { abc: 123 };
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
run().then(() => {
module.exports = {
a: 1,
};
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
module.exports = function lol() {
return 42;
};
console.log(module.exports, module.exports());
queueMicrotask(() => {
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
});
});
});

View File

@@ -1,100 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// This is a benchmark of the performance impact of using private properties.
bench("Polyfillprivate", () => {
"use strict";
var __classPrivateFieldGet =
(this && this.__classPrivateFieldGet) ||
function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var __classPrivateFieldSet =
(this && this.__classPrivateFieldSet) ||
function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
throw new TypeError("Cannot write private member to an object whose class did not declare it");
return kind === "a" ? f.call(receiver, value) : f ? (f.value = value) : state.set(receiver, value), value;
};
var _Foo_state, _Foo_inc;
class Foo {
constructor() {
_Foo_state.set(this, 1);
_Foo_inc.set(this, 13);
}
run() {
let n = 1000000;
while (n-- > 0) {
__classPrivateFieldSet(
this,
_Foo_state,
__classPrivateFieldGet(this, _Foo_state, "f") + __classPrivateFieldGet(this, _Foo_inc, "f"),
"f",
);
}
return n;
}
}
(_Foo_state = new WeakMap()), (_Foo_inc = new WeakMap());
new Foo().run();
});
bench("NativePrivates", () => {
class Foo {
#state = 1;
#inc = 13;
run() {
let n = 1000000;
while (n-- > 0) {
this.#state += this.#inc;
}
return n;
}
}
new Foo().run();
});
bench("ConventionalPrivates", () => {
class Foo {
_state = 1;
_inc = 13;
run() {
let n = 1000000;
while (n-- > 0) {
this._state += this._inc;
}
return n;
}
}
new Foo().run();
});
const _state = Symbol("state");
const _inc = Symbol("inc");
bench("SymbolPrivates", () => {
class Foo {
[_state] = 1;
[_inc] = 13;
run() {
let n = 1000000;
while (n-- > 0) {
this[_state] += this[_inc];
}
return n;
}
}
new Foo().run();
});
await run();

View File

@@ -1,33 +0,0 @@
import { bench, run } from "./runner.mjs";
import { performance } from "perf_hooks";
bench("process.memoryUsage()", () => {
process.memoryUsage();
});
bench("process.memoryUsage.rss()", () => {
process.memoryUsage.rss();
});
bench("process.cpuUsage()", () => {
process.cpuUsage();
});
const init = process.cpuUsage();
bench("process.cpuUsage(delta)", () => {
process.cpuUsage(init);
});
bench("performance.now()", () => {
performance.now();
});
bench("process.hrtime()", () => {
process.hrtime();
});
bench("process.hrtime.bigint()", () => {
process.hrtime.bigint();
});
await run();

View File

@@ -1,17 +0,0 @@
import { bench, run } from "./runner.mjs";
import { readFileSync, existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
bench(`readFileSync(/tmp/404-not-found)`, () => {
try {
readFileSync("/tmp/404-not-found");
} catch (e) {}
});
bench(`readFile(/tmp/404-not-found)`, async () => {
try {
await readFile("/tmp/404-not-found");
} catch (e) {}
});
await run();

View File

@@ -1,10 +1,4 @@
import { realpathSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];
import { bench, run } from "./runner.mjs";
bench("realpathSync x " + count, () => {
for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");
});
await run();
for (let i = 0; i < count; i++) realpathSync(arg);

View File

@@ -1,15 +0,0 @@
// This mostly exists to check for a memory leak in response.clone()
import { bench, run } from "./runner.mjs";
const req = new Request("http://localhost:3000/");
const resp = await fetch("http://example.com");
bench("req.clone().url", () => {
return req.clone().url;
});
bench("resp.clone().url", () => {
return resp.clone().url;
});
await run();

View File

@@ -1,136 +0,0 @@
// This snippet mostly exists to reproduce a memory leak
//
import { bench, run } from "mitata";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
// Force the string to be 8bit
const str = String.fromCharCode(
...JSON.stringify(obj)
.split("")
.map(a => a.charCodeAt(0)),
);
var i = 0;
bench("new Response().arrayBuffer() (new string each call, latin1)", async () => {
return await new Response(str + i++).arrayBuffer();
});
bench("new Response().arrayBuffer() (new string each call, utf16)", async () => {
return await new Response(str + i++ + "😊").arrayBuffer();
});
bench("new Response().arrayBuffer() (existing string, latin1)", async () => {
return await new Response(str).arrayBuffer();
});
await run();

View File

@@ -1,123 +0,0 @@
// This snippet mostly exists to reproduce a memory leak
import { bench, run } from "mitata";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
bench("Response.json(obj)", async () => {
return Response.json(obj);
});
bench("Response.json(obj).json()", async () => {
return await Response.json(obj).json();
});
await run();

View File

@@ -1 +0,0 @@
for (let i = 0; i < 9999999; i++) new Request("http://aaaaaaaaaaaaaaaaaaaaa");

View File

@@ -1,37 +0,0 @@
import { bench, run } from "./runner.mjs";
const blob = new Blob(["<p id='foo'>Hello</p>"]);
bench("prepend", async () => {
await new HTMLRewriter()
.on("p", {
element(element) {
element.prepend("Hello");
},
})
.transform(new Response(blob))
.text();
});
bench("append", async () => {
await new HTMLRewriter()
.on("p", {
element(element) {
element.append("Hello");
},
})
.transform(new Response(blob))
.text();
});
bench("getAttribute", async () => {
await new HTMLRewriter()
.on("p", {
element(element) {
element.getAttribute("id");
},
})
.transform(new Response(blob))
.text();
});
await run();

View File

@@ -1,22 +0,0 @@
import { tmpdir } from "node:os";
import { promises, existsSync, mkdirSync } from "node:fs";
const count = 1024 * 12;
var queue = new Array(count);
var paths = new Array(count);
for (let i = 0; i < count; i++) {
const path = `${tmpdir()}/${Date.now()}.rm.dir${i}`;
try {
mkdirSync(path);
} catch (e) {}
paths[i] = path;
queue[i] = promises.rmdir(path);
}
await Promise.all(queue);
for (let i = 0; i < count; i++) {
if (existsSync(paths[i])) {
throw new Error(`Path ${paths[i]} was not removed`);
}
}

View File

@@ -1,128 +0,0 @@
import { serialize, deserialize } from "node:v8";
import { bench, run } from "./runner.mjs";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
bench("serialize", () => {
serialize(obj);
});
const serialized = serialize(obj);
bench("deserialize", () => {
deserialize(serialized);
});
if (typeof Bun !== "undefined") {
if (!Bun.deepEquals(obj, deserialize(serialized))) {
throw new Error("not equal");
}
}
await run();

View File

@@ -1,39 +0,0 @@
var testArray = [
{
description: "Random description.",
testNumber: 123456789,
testBoolean: true,
testObject: {
testString: "test string",
testNumber: 12345,
},
testArray: [
{
myName: "test name",
myNumber: 123245,
},
],
},
{
description: "Random description.",
testNumber: 123456789,
testBoolean: true,
testObject: {
testString: "test string",
testNumber: 12345,
},
testArray: [
{
myName: "test name",
myNumber: 123245,
},
],
},
];
import { bench, run } from "./runner.mjs";
bench("structuredClone(array)", () => structuredClone(testArray));
bench("structuredClone(123)", () => structuredClone(123));
bench("structuredClone({a: 123})", () => structuredClone({ a: 123 }));
await run();

View File

@@ -1,60 +0,0 @@
import { bench, run } from "./runner.mjs";
function deprecateUsingClosure(fn, msg, code) {
if (process.noDeprecation === true) {
return fn;
}
var realFn = fn;
var wrapper = () => {
return fnToWrap.apply(this, arguments);
};
var deprecater = () => {
if (process.throwDeprecation) {
var err = new Error(msg);
if (code) err.code = code;
throw err;
} else if (process.traceDeprecation) {
console.trace(msg);
} else {
console.error(msg);
}
fnToWrap = realFn;
return realFn.apply(this, arguments);
};
var fnToWrap = deprecater;
return wrapper;
}
function deprecateOriginal(fn, msg) {
var warned = false;
function deprecated() {
if (!warned) {
if (process.throwDeprecation) {
throw new Error(msg);
} else if (process.traceDeprecation) {
console.trace(msg);
} else {
console.error(msg);
}
warned = true;
}
return fn.apply(this, arguments);
}
return deprecated;
}
const deprecatedy = deprecateUsingClosure(() => {}, "This is deprecated", "DEP0001");
const deprecatedy2 = deprecateOriginal(() => {}, "This is deprecated");
bench("deprecateUsingClosure", () => {
deprecatedy(Math.random() + 1);
});
bench("deprecateOriginal", () => {
deprecatedy2(Math.random() + 1);
});
await run();

View File

@@ -1,20 +0,0 @@
import { group } from "mitata";
import { bench, run } from "./runner.mjs";
const sizes = [
["small (63 bytes)", 63],
["medium (4096 bytes)", 4096],
["large (64 MB)", 64 * 1024 * 1024],
];
for (let [name, size] of sizes) {
group(name, () => {
var buf = new Uint8Array(size);
for (let algorithm of ["SHA-1", "SHA-256", "SHA-384", "SHA-512"]) {
bench(algorithm, async () => {
await crypto.subtle.digest(algorithm, buf);
});
}
});
}
await run();

View File

@@ -9,8 +9,9 @@ bench("writeFile(/tmp/foo.txt, short string)", async () => {
await writeFile("/tmp/foo.txt", "short string", "utf8");
});
const buffer = Buffer.from("short string");
bench("writeFile(/tmp/foo.txt, Buffer.from(short string))", async () => {
await writeFile("/tmp/foo.txt", Buffer.from("short string"));
await writeFile("/tmp/foo.txt", buffer);
});
const fd = openSync("/tmp/foo.txt", "w");
@@ -21,7 +22,7 @@ bench("write(fd, short string)", () => {
});
bench("write(fd, Uint8Array(short string))", () => {
const bytesWritten = write(fd, Buffer.from("short string"));
const bytesWritten = write(fd, buffer);
if (bytesWritten !== 12) throw new Error("wrote !== 12");
});

Binary file not shown.

View File

@@ -1,7 +1,7 @@
{
"name": "bench",
"dependencies": {
"better-sqlite3": "8.5.0"
"better-sqlite3": "^8.0.1"
},
"scripts": {
"build": "exit 0",

View File

@@ -2,7 +2,7 @@
This benchmarks a websocket server intended as a simple but very active chat room.
First, start the server. By default, it will wait for 32 clients which the client script will handle.
First, start the server. By default, it will wait for 16 clients which the client script will handle.
Run in Bun (`Bun.serve`):
@@ -19,10 +19,10 @@ node ./chat-server.node.mjs
Run in Deno (`Deno.serve`):
```bash
deno run -A ./chat-server.deno.mjs
deno run -A --unstable ./chat-server.deno.mjs
```
Then, run the client script. By default, it will connect 32 clients. This client script can run in Bun, Node, or Deno
Then, run the client script. By default, it will connect 16 clients. This client script can run in Bun, Node, or Deno
```bash
node ./chat-client.mjs
@@ -34,4 +34,6 @@ For example, when the client sends `"foo"`, the server sends back `"John: foo"`
The client script waits until it receives all the messages for each client before sending the next batch of messages.
TODO: once Deno lands their performance improvements, increase the client count (it was originally going to be 32 or 64, but that would've excluded Deno from the benchmark)
This project was created using `bun init` in bun v0.2.1. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.
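For a sense of what each simulated client does, here is a minimal single-connection sketch (an illustration only, not the benchmark script; it assumes a runtime with a global `WebSocket` and a chat server already listening on the scripts' default `ws://0.0.0.0:4001` address):

```ts
// Hypothetical single-client sketch (not chat-client.mjs itself).
// Assumes a global WebSocket (Bun, Deno, or a recent Node) and a chat
// server listening on the default ws://0.0.0.0:4001 used by the scripts.
const socket = new WebSocket("ws://0.0.0.0:4001");

socket.addEventListener("open", () => {
  // The server broadcasts this back with the sender's name, e.g. "John: Hello World!".
  socket.send("Hello World!");
});

socket.addEventListener("message", event => {
  console.log("received:", event.data);
  socket.close(); // the real client keeps sockets open and sends batches of messages
});
```

The actual client script opens `CLIENTS_COUNT` of these connections at once and waits for every echo before sending the next batch, which is what keeps the room busy.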

Binary file not shown.

View File

@@ -3,7 +3,7 @@ const env = "process" in globalThis ? process.env : "Deno" in globalThis ? Deno.
const SERVER = env.SERVER || "ws://0.0.0.0:4001";
const WebSocket = globalThis.WebSocket || (await import("ws")).WebSocket;
const LOG_MESSAGES = env.LOG_MESSAGES === "1";
const CLIENTS_TO_WAIT_FOR = parseInt(env.CLIENTS_COUNT || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(env.CLIENTS_COUNT || "", 10) || 16;
const DELAY = 64;
const MESSAGES_TO_SEND = Array.from({ length: 32 }, () => [
"Hello World!",

View File

@@ -1,5 +1,5 @@
// See ./README.md for instructions on how to run this benchmark.
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 16;
var remainingClients = CLIENTS_TO_WAIT_FOR;
const COMPRESS = process.env.COMPRESS === "1";
const port = process.PORT || 4001;
@@ -32,7 +32,6 @@ const server = Bun.serve({
},
perMessageDeflate: false,
publishToSelf: true,
},
fetch(req, server) {

View File

@@ -1,6 +1,6 @@
// See ./README.md for instructions on how to run this benchmark.
const port = Deno.env.get("PORT") || 4001;
const CLIENTS_TO_WAIT_FOR = parseInt(Deno.env.get("CLIENTS_COUNT") || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(Deno.env.get("CLIENTS_COUNT") || "", 10) || 16;
var clients = [];
async function reqHandler(req) {
@@ -42,4 +42,4 @@ function sendReadyMessage() {
console.log(`Waiting for ${CLIENTS_TO_WAIT_FOR} clients to connect..`);
Deno.serve({ port }, reqHandler);
Deno.serve(reqHandler, { port });

View File

@@ -1,6 +1,6 @@
// See ./README.md for instructions on how to run this benchmark.
const port = process.env.PORT || 4001;
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 16;
import { createRequire } from "module";
const require = createRequire(import.meta.url);

View File

@@ -3,8 +3,8 @@
"module": "index.ts",
"type": "module",
"dependencies": {
"bufferutil": "4.0.7",
"utf-8-validate": "6.0.3",
"ws": "8.13.0"
"bufferutil": "^4.0.7",
"utf-8-validate": "^5.0.10",
"ws": "^8.9.0"
}
}

273
build.zig
View File

@@ -1,9 +1,5 @@
const required_zig_version = "0.12.0-dev.899+027aabf49";
const std = @import("std");
const pathRel = std.fs.path.relative;
const Wyhash = @import("./src/wyhash.zig").Wyhash;
var is_debug_build = false;
fn moduleSource(comptime out: []const u8) FileSource {
if (comptime std.fs.path.dirname(@src().file)) |base| {
const outpath = comptime base ++ std.fs.path.sep_str ++ out;
@@ -12,10 +8,26 @@ fn moduleSource(comptime out: []const u8) FileSource {
return FileSource.relative(out);
}
}
pub fn addPicoHTTP(step: *CompileStep, comptime with_obj: bool) void {
step.addIncludePath("src/deps");
fn exists(path: []const u8) bool {
_ = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
return true;
if (with_obj) {
step.addObjectFile("src/deps/picohttpparser.o");
}
step.addIncludePath("src/deps");
if (with_obj) {
step.addObjectFile(panicIfNotFound("src/deps/picohttpparser.o"));
step.addObjectFile(panicIfNotFound("src/deps/libssl.a"));
step.addObjectFile(panicIfNotFound("src/deps/libcrypto.a"));
}
// step.add("/Users/jarred/Code/WebKit/WebKitBuild/Release/lib/libWTF.a");
// ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON"
// set -gx ICU_INCLUDE_DIRS "/usr/local/opt/icu4c/include"
// homebrew-provided icu4c
}
const color_map = std.ComptimeStringMap([]const u8, .{
@@ -41,10 +53,6 @@ fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: [
break :brk b.createModule(.{
.source_file = FileSource.relative("src/io/io_linux.zig"),
});
} else if (target.isWindows()) {
break :brk b.createModule(.{
.source_file = FileSource.relative("src/io/io_windows.zig"),
});
}
break :brk b.createModule(.{
@@ -53,37 +61,11 @@ fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: [
};
step.addModule("async_io", io);
step.addModule("zlib-internal", brk: {
if (target.isWindows()) {
break :brk b.createModule(.{ .source_file = FileSource.relative("src/deps/zlib.win32.zig") });
}
break :brk b.createModule(.{ .source_file = FileSource.relative("src/deps/zlib.posix.zig") });
});
var async_: *Module = brk: {
if (target.isDarwin() or target.isLinux() or target.isFreeBSD()) {
break :brk b.createModule(.{
.source_file = FileSource.relative("src/async/posix_event_loop.zig"),
});
} else if (target.isWindows()) {
break :brk b.createModule(.{
.source_file = FileSource.relative("src/async/windows_event_loop.zig"),
});
}
break :brk b.createModule(.{
.source_file = FileSource.relative("src/async/stub_event_loop.zig"),
});
};
step.addModule("async", async_);
}
const BunBuildOptions = struct {
canary: bool = false,
sha: [:0]const u8 = "",
version: []const u8 = "",
baseline: bool = false,
bindgen: bool = false,
sizegen: bool = false,
@@ -92,44 +74,24 @@ const BunBuildOptions = struct {
runtime_js_version: u64 = 0,
fallback_html_version: u64 = 0,
tinycc: bool = true,
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const runtime_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
this.runtime_js_version = runtime_hash;
} else |_| {
if (!is_debug_build) {
@panic("Runtime file was not read successfully. Please run `make setup`");
}
}
if (std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const fallback_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
this.fallback_html_version = fallback_hash;
} else |_| {
if (!is_debug_build) {
@panic("Fallback file was not read successfully. Please run `make setup`");
}
}
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
const runtime_hash = std.hash.Wyhash.hash(
0,
try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
);
this.runtime_js_version = runtime_hash;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
const fallback_hash = std.hash.Wyhash.hash(
0,
try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
);
this.fallback_html_version = fallback_hash;
}
pub fn step(this: BunBuildOptions, b: anytype) *std.build.OptionsStep {
var opts = b.addOptions();
opts.addOption(@TypeOf(this.canary), "is_canary", this.canary);
opts.addOption(
std.SemanticVersion,
"version",
std.SemanticVersion.parse(this.version) catch @panic(b.fmt("Invalid version: {s}", .{this.version})),
);
opts.addOption(@TypeOf(this.sha), "sha", this.sha);
opts.addOption(@TypeOf(this.baseline), "baseline", this.baseline);
opts.addOption(@TypeOf(this.bindgen), "bindgen", this.bindgen);
@@ -137,12 +99,10 @@ const BunBuildOptions = struct {
opts.addOption(@TypeOf(this.base_path), "base_path", this.base_path);
opts.addOption(@TypeOf(this.runtime_js_version), "runtime_js_version", this.runtime_js_version);
opts.addOption(@TypeOf(this.fallback_html_version), "fallback_html_version", this.fallback_html_version);
opts.addOption(@TypeOf(this.tinycc), "tinycc", this.tinycc);
return opts;
}
};
// relative to the prefix
var output_dir: []const u8 = "";
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
var file = std.fs.cwd().openFile(filepath, .{ .optimize = .read_only }) catch |err| {
@@ -177,30 +137,6 @@ const Module = std.build.Module;
const fs = std.fs;
pub fn build(b: *Build) !void {
build_(b) catch |err| {
if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*);
}
return err;
};
}
pub fn build_(b: *Build) !void {
if (!std.mem.eql(u8, @import("builtin").zig_version_string, required_zig_version)) {
const colors = std.io.getStdErr().supportsAnsiEscapeCodes();
std.debug.print(
"{s}WARNING:\nBun requires Zig version '{s}', but found '{s}', build may fail...\nMake sure you installed the right version as per https://bun.sh/docs/project/contributing#install-zig\n{s}You can update to the right version using 'zigup {s}'\n\n",
.{
if (colors) "\x1b[1;33m" else "",
required_zig_version,
@import("builtin").zig_version_string,
if (colors) "\x1b[0m" else "",
required_zig_version,
},
);
}
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
@@ -210,8 +146,6 @@ pub fn build_(b: *Build) !void {
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
optimize = b.standardOptimizeOption(.{});
const generated_code_directory = b.option([]const u8, "generated-code", "Set the generated code directory") orelse "./";
var output_dir_buf = std.mem.zeroes([4096]u8);
var bin_label = if (optimize == std.builtin.OptimizeMode.Debug) "packages/debug-bun-" else "packages/bun-";
@@ -244,26 +178,26 @@ pub fn build_(b: *Build) !void {
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
if (b.option([]const u8, "output-dir", "target to install to") orelse b.env_map.get("OUTPUT_DIR")) |output_dir_| {
output_dir = try pathRel(b.allocator, b.install_prefix, output_dir_);
if (b.option([]const u8, "output-dir", "target to install to") orelse std.os.getenv("OUTPUT_DIR")) |output_dir_| {
output_dir = b.pathFromRoot(output_dir_);
} else {
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
output_dir = try pathRel(b.allocator, b.install_prefix, output_dir_base);
output_dir = b.pathFromRoot(output_dir_base);
}
is_debug_build = optimize == OptimizeMode.Debug;
std.fs.cwd().makePath(output_dir) catch {};
const bun_executable_name = if (optimize == std.builtin.OptimizeMode.Debug) "bun-debug" else "bun";
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
"root_wasm.zig"
"src/main_wasm.zig"
else
"root.zig";
const min_version: std.SemanticVersion = if (!(target.isWindows() or target.getOsTag() == .freestanding))
const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMin().semver
else
.{ .major = 0, .minor = 0, .patch = 0 };
const max_version: std.SemanticVersion = if (!(target.isWindows() or target.getOsTag() == .freestanding))
const max_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMax().semver
else
.{ .major = 0, .minor = 0, .patch = 0 };
@@ -274,26 +208,8 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative(root_src),
.target = target,
.optimize = optimize,
.main_mod_path = .{ .cwd_relative = b.pathFromRoot(".") },
});
if (!exists(b.pathFromRoot(try std.fs.path.join(b.allocator, &.{
"src",
"js_lexer",
"id_continue_bitset.blob",
})))) {
const identifier_data = b.pathFromRoot(try std.fs.path.join(b.allocator, &.{ "src", "js_lexer", "identifier_data.zig" }));
var run_step = b.addSystemCommand(&.{
b.zig_exe,
"run",
identifier_data,
});
run_step.has_side_effects = true;
obj.step.dependOn(&run_step.step);
}
b.reference_trace = 16;
var default_build_options: BunBuildOptions = brk: {
const is_baseline = arch.isX86() and (target.cpu_model == .baseline or
!std.Target.x86.featureSetHas(target.getCpuFeatures(), .avx2));
@@ -308,6 +224,7 @@ pub fn build_(b: *Build) !void {
.argv = &.{
"git",
"rev-parse",
"--short",
"HEAD",
},
.cwd = b.pathFromRoot("."),
@@ -318,12 +235,9 @@ pub fn build_(b: *Build) !void {
}
}
const is_canary =
b.option(bool, "canary", "Treat this as a canary build") orelse
((b.env_map.get("BUN_CANARY") orelse "0")[0] == '1');
const is_canary = (std.os.getenvZ("BUN_CANARY") orelse "0")[0] == '1';
break :brk .{
.canary = is_canary,
.version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0",
.sha = git_sha,
.baseline = is_baseline,
.bindgen = false,
@@ -332,6 +246,9 @@ pub fn build_(b: *Build) !void {
};
{
addPicoHTTP(obj, false);
obj.setMainPkgPath(b.pathFromRoot("."));
try addInternalPackages(
b,
obj,
@@ -362,15 +279,9 @@ pub fn build_(b: *Build) !void {
std.io.getStdErr().writer().print("Output: {s}/{s}\n\n", .{ output_dir, bun_executable_name }) catch unreachable;
defer obj_step.dependOn(&obj.step);
var install = b.addInstallFileWithDir(
obj.getEmittedBin(),
.{ .custom = output_dir },
b.fmt("{s}.o", .{bun_executable_name}),
);
install.step.dependOn(&obj.step);
obj_step.dependOn(&install.step);
obj.emit_bin = .{
.emit_to = b.fmt("{s}/{s}.o", .{ output_dir, bun_executable_name }),
};
var actual_build_options = default_build_options;
if (b.option(bool, "generate-sizes", "Generate sizes of things") orelse false) {
actual_build_options.sizegen = true;
@@ -378,25 +289,16 @@ pub fn build_(b: *Build) !void {
obj.addOptions("build_options", actual_build_options.step(b));
// Generated Code
obj.addModule("generated/ZigGeneratedClasses.zig", b.createModule(.{
.source_file = .{ .path = b.fmt("{s}/ZigGeneratedClasses.zig", .{generated_code_directory}) },
}));
obj.addModule("generated/ResolvedSourceTag.zig", b.createModule(.{
.source_file = .{ .path = b.fmt("{s}/ResolvedSourceTag.zig", .{generated_code_directory}) },
}));
obj.linkLibC();
obj.dll_export_fns = true;
obj.strip = false;
obj.bundle_compiler_rt = false;
obj.omit_frame_pointer = optimize != .Debug;
obj.subsystem = .Console;
// Disable stack probing on x86 so we don't need to include compiler_rt
if (target.getCpuArch().isX86() or target.isWindows()) obj.disable_stack_probing = true;
if (target.getCpuArch().isX86()) obj.disable_stack_probing = true;
if (b.option(bool, "for-editor", "Do not emit bin, just check for errors") orelse false) {
// obj.emit_bin = .no_emit;
obj.generated_bin = null;
obj.emit_bin = .no_emit;
}
if (target.getOsTag() == .linux) {
@@ -414,10 +316,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("src/bindgen.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
var headers_build_options = default_build_options;
headers_build_options.bindgen = true;
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -425,22 +326,19 @@ pub fn build_(b: *Build) !void {
}
{
const wasm_step = b.step("bun-wasm", "Build WASM");
var wasm = b.addStaticLibrary(.{
const wasm = b.step("bun-wasm", "Build WASM");
var wasm_step = b.addStaticLibrary(.{
.name = "bun-wasm",
.root_source_file = FileSource.relative("root_wasm.zig"),
.root_source_file = FileSource.relative("src/main_wasm.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer wasm_step.dependOn(&wasm.step);
wasm.strip = false;
defer wasm.dependOn(&wasm_step.step);
wasm_step.strip = false;
// wasm_step.link_function_sections = true;
// wasm_step.link_emit_relocs = true;
// wasm_step.single_threaded = true;
try configureObjectStep(b, wasm, wasm_step, @TypeOf(target), target);
var build_opts = default_build_options;
wasm.addOptions("build_options", build_opts.step(b));
try configureObjectStep(b, wasm_step, @TypeOf(target), target, obj.main_pkg_path.?);
}
{
@@ -450,10 +348,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("misctools/http_bench.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -464,10 +361,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("misctools/machbench.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -478,10 +374,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("misctools/fetch.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -492,10 +387,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("src/bench/string-handling.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -506,10 +400,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("src/sha.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -520,10 +413,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("src/sourcemap/vlq_bench.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -534,10 +426,9 @@ pub fn build_(b: *Build) !void {
.root_source_file = FileSource.relative("misctools/tgz.zig"),
.target = target,
.optimize = optimize,
.main_mod_path = obj.main_mod_path,
});
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -551,23 +442,16 @@ pub fn build_(b: *Build) !void {
var headers_obj: *CompileStep = b.addTest(.{
.root_source_file = FileSource.relative(test_file orelse "src/main.zig"),
.target = target,
.main_mod_path = obj.main_mod_path,
});
headers_obj.filter = test_filter;
if (test_bin_) |test_bin| {
headers_obj.name = std.fs.path.basename(test_bin);
if (std.fs.path.dirname(test_bin)) |dir| {
var install = b.addInstallFileWithDir(
headers_obj.getEmittedBin(),
.{ .custom = try std.fs.path.relative(b.allocator, output_dir, dir) },
headers_obj.name,
);
install.step.dependOn(&headers_obj.step);
headers_step.dependOn(&install.step);
}
if (std.fs.path.dirname(test_bin)) |dir| headers_obj.emit_bin = .{
.emit_to = b.fmt("{s}/{s}", .{ dir, headers_obj.name }),
};
}
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_step.dependOn(&headers_obj.step);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -578,24 +462,23 @@ pub fn build_(b: *Build) !void {
pub var original_make_fn: ?*const fn (step: *std.build.Step) anyerror!void = null;
pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, obj_step: *std.build.Step, comptime Target: type, target: Target) !void {
pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, comptime Target: type, target: Target, main_pkg_path: []const u8) !void {
obj.setMainPkgPath(main_pkg_path);
// obj.setTarget(target);
try addInternalPackages(b, obj, std.heap.page_allocator, b.zig_exe, target);
if (target.getOsTag() != .freestanding)
addPicoHTTP(obj, false);
obj.strip = false;
// obj.setBuildMode(optimize);
obj.bundle_compiler_rt = false;
if (obj.emit_directory == null) {
var install = b.addInstallFileWithDir(
obj.getEmittedBin(),
.{ .custom = output_dir },
b.fmt("{s}.o", .{obj.name}),
);
if (obj.emit_bin == .default)
obj.emit_bin = .{
.emit_to = b.fmt("{s}/{s}.o", .{ output_dir, obj.name }),
};
install.step.dependOn(&obj.step);
obj_step.dependOn(&install.step);
}
if (target.getOsTag() != .freestanding) obj.linkLibC();
if (target.getOsTag() != .freestanding) obj.bundle_compiler_rt = false;

BIN
bun.lockb

Binary file not shown.

View File

@@ -1,8 +1,8 @@
[test]
# Large monorepos (like Bun) may want to specify the test directory more specifically
# By default, `bun test` scans every single folder recursively which, if you
# have a gigantic submodule (like WebKit), requires lots of directory
# By default, `bun test` scans every single folder recurisvely which, if you
# have a gigantic submodule (like WebKit), it has to do lots of directory
# traversals
#
# Instead, we can only scan the test directory for Bun's runtime tests
# Instead, we can just make it scan only the test directory for Bun's runtime tests
root = "test"

View File

@@ -92,12 +92,12 @@ _bun_completions() {
PACKAGE_OPTIONS[REMOVE_OPTIONS_LONG]="";
PACKAGE_OPTIONS[REMOVE_OPTIONS_SHORT]="";
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --frozen-lockfile --no-save --dry-run --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
PACKAGE_OPTIONS[SHARED_OPTIONS_SHORT]="-c -y -p -f -g";
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --frozen-lockfile --no-save --dry-run --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
PM_OPTIONS[SHORT_OPTIONS]="-c -y -p -f -g"
local cur_word="${COMP_WORDS[${COMP_CWORD}]}";
local prev="${COMP_WORDS[$(( COMP_CWORD - 1 ))]}";

File diff suppressed because it is too large.

View File

@@ -115,10 +115,12 @@ subcommands:
- yarn -- "Write a yarn.lock file (yarn v1)"
- production -- "Don't install devDependencies"
- p -- "Don't install devDependencies"
- frozen-lockfile -- "Disallow changes to lockfile"
- no-save --
- dry-run -- "Don't install anything"
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
- name: lockfile
type: string
summary: "Store & load a lockfile at a specific filepath"
- name: cache-dir
type: string
summary: "Store & load cached data from a specific directory path"
@@ -150,13 +152,15 @@ subcommands:
- development -- "Add dependency to devDependencies"
- d -- "Add dependency to devDependencies"
- p -- "Don't install devDependencies"
- frozen-lockfile -- "Disallow changes to lockfile"
- no-save --
- dry-run -- "Don't install anything"
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
- no-cache -- "Ignore manifest cache entirely"
- silent -- "Don't output anything"
- verbose -- "Excessively verbose logging"
- name: lockfile
type: string
summary: "Store & load a lockfile at a specific filepath"
- name: cache-dir
type: string
summary: "Store & load cached data from a specific directory path"
@@ -188,10 +192,12 @@ subcommands:
- yarn -- "Write a yarn.lock file (yarn v1)"
- production -- "Don't install devDependencies"
- p -- "Don't install devDependencies"
- frozen-lockfile -- "Disallow changes to lockfile"
- no-save --
- dry-run -- "Don't install anything"
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
- name: lockfile
type: string
summary: "Store & load a lockfile at a specific filepath"
- name: cache-dir
type: string
summary: "Store & load cached data from a specific directory path"

View File

@@ -0,0 +1,29 @@
# bun:alpine
# Not officially supported (yet)
ARG GLIBC_RELEASE=2.35-r0
FROM alpine:latest AS build
WORKDIR /tmp
RUN apk --no-cache add unzip
ARG GLIBC_RELEASE
RUN wget https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_RELEASE}/glibc-${GLIBC_RELEASE}.apk
ADD https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip bun-linux-x64.zip
RUN unzip bun-linux-x64.zip
FROM alpine:latest
ARG GLIBC_RELEASE
COPY --from=build /tmp/sgerrand.rsa.pub /etc/apk/keys
COPY --from=build /tmp/glibc-${GLIBC_RELEASE}.apk /tmp
COPY --from=build /tmp/bun-linux-x64/bun /usr/local/bin
RUN apk --no-cache --force-overwrite add /tmp/glibc-${GLIBC_RELEASE}.apk \
&& rm /etc/apk/keys/sgerrand.rsa.pub \
&& rm /tmp/glibc-${GLIBC_RELEASE}.apk
RUN bun --version

View File

@@ -0,0 +1,77 @@
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM debian:bullseye-slim
RUN groupadd bun \
--gid 1000 \
&& useradd bun \
--uid 1000 \
--gid bun \
--shell /bin/sh \
--create-home
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin
COPY --from=build /usr/local/bin/bunx /usr/local/bin
WORKDIR /home/bun/app
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
CMD ["/usr/local/bin/bun"]

View File

@@ -0,0 +1,68 @@
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM gcr.io/distroless/base-nossl-debian11
COPY --from=build /usr/local/bin/bun /usr/local/bin
COPY --from=build /usr/local/bin/bunx /usr/local/bin
WORKDIR /app
ENTRYPOINT ["/usr/local/bin/bun"]
CMD ["/usr/local/bin/bun"]

View File

@@ -1,113 +0,0 @@
FROM alpine:3.18 AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
# TODO: Instead of downloading glibc from a third-party source, we should
# build it from source. This is a temporary solution.
# See: https://github.com/sgerrand/alpine-pkg-glibc
# https://github.com/sgerrand/alpine-pkg-glibc/releases
# https://github.com/sgerrand/alpine-pkg-glibc/issues/176
ARG GLIBC_VERSION=2.34-r0
# https://github.com/oven-sh/bun/issues/5545#issuecomment-1722461083
ARG GLIBC_VERSION_AARCH64=2.26-r1
RUN apk --no-cache add \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& arch="$(apk --print-arch)" \
&& case "${arch##*-}" in \
x86_64) build="x64-baseline";; \
aarch64) build="aarch64";; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: $tag" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify: $tag" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify: $tag" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& cd /tmp \
&& case "${arch##*-}" in \
x86_64) curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc v${GLIBC_VERSION}" && exit 1) \
&& mv "glibc-${GLIBC_VERSION}.apk" glibc.apk \
&& curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION}" && exit 1) \
&& mv "glibc-bin-${GLIBC_VERSION}.apk" glibc-bin.apk ;; \
aarch64) curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-${GLIBC_VERSION_AARCH64}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc v${GLIBC_VERSION_AARCH64}" && exit 1) \
&& mv "glibc-${GLIBC_VERSION_AARCH64}.apk" glibc.apk \
&& curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-bin-${GLIBC_VERSION_AARCH64}.apk" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION_AARCH64}" && exit 1) \
&& mv "glibc-bin-${GLIBC_VERSION_AARCH64}.apk" glibc-bin.apk ;; \
*) echo "error: unsupported architecture '$arch'"; exit 1 ;; \
esac
FROM alpine:3.18
COPY --from=build /tmp/glibc.apk /tmp/
COPY --from=build /tmp/glibc-bin.apk /tmp/
COPY --from=build /usr/local/bin/bun /usr/local/bin/
COPY docker-entrypoint.sh /usr/local/bin/
RUN addgroup -g 1000 bun \
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
&& apk --no-cache --force-overwrite --allow-untrusted add \
/tmp/glibc.apk \
/tmp/glibc-bin.apk \
&& rm /tmp/glibc.apk \
&& rm /tmp/glibc-bin.apk \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
WORKDIR /home/bun/app
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
CMD ["/usr/local/bin/bun"]

View File

@@ -1,7 +1,7 @@
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
ARG BUN_VERSION=0.5.7
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
@@ -17,7 +17,7 @@ RUN apt-get update -qq \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
@@ -33,44 +33,44 @@ RUN apt-get update -qq \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: $tag" && exit 1) \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify: $tag" && exit 1) \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify: $tag" && exit 1) \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM debian:bullseye-slim
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
RUN groupadd bun \
--gid 1000 \
&& useradd bun \
--uid 1000 \
--gid bun \
--shell /bin/sh \
--create-home \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
--create-home
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin
COPY --from=build /usr/local/bin/bunx /usr/local/bin
WORKDIR /home/bun/app
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]

View File

@@ -1,7 +1,7 @@
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
ARG BUN_VERSION=0.5.7
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
@@ -17,7 +17,7 @@ RUN apt-get update -qq \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
@@ -33,42 +33,44 @@ RUN apt-get update -qq \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: $tag" && exit 1) \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify: $tag" && exit 1) \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify: $tag" && exit 1) \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM debian:bullseye
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
RUN groupadd bun \
--gid 1000 \
&& useradd bun \
--uid 1000 \
--gid bun \
--shell /bin/sh \
--create-home \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
--create-home
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin
COPY --from=build /usr/local/bin/bunx /usr/local/bin
WORKDIR /home/bun/app
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]

View File

@@ -1,69 +0,0 @@
FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: failed to download: $tag" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify: $tag" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify: $tag" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& which bun \
&& bun --version
FROM gcr.io/distroless/base-nossl-debian11
COPY --from=build /usr/local/bin/bun /usr/local/bin/
# Temporarily use the `build`-stage image binaries to create a symlink:
RUN --mount=type=bind,from=build,source=/usr/bin,target=/usr/bin \
--mount=type=bind,from=build,source=/bin,target=/bin <<EOF
ln -s /usr/local/bin/bun /usr/local/bin/bunx
which bunx
EOF
ENTRYPOINT ["/usr/local/bin/bun"]

Some files were not shown because too many files have changed in this diff.