mirror of
https://github.com/oven-sh/bun
synced 2026-02-02 15:08:46 +00:00
Revert "Convert build scripts to CMake (#13427)"
This reverts commit 354df17d16.
This commit is contained in:
92
scripts/all-dependencies.ps1
Executable file
92
scripts/all-dependencies.ps1
Executable file
@@ -0,0 +1,92 @@
|
||||
param(
  [Alias("f")][switch]$Force = $false
)

$ErrorActionPreference = 'Stop'

. (Join-Path $PSScriptRoot "env.ps1")
if ($env:CI -eq "true") {
  & (Join-Path $PSScriptRoot "update-submodules.ps1")
}

# Release builds always rebuild everything.
if ($env:RELEASE -eq "1") {
  $Force = $true
}

$DidAnything = $false;

# Build one dependency via scripts/build-<Script>.ps1, skipping the build when
# every expected output already exists in $BUN_DEPS_OUT_DIR.
function Build-Dependency {
  param(
    $Script,
    [string[]]$Outputs
  )

  $ScriptPath = Join-Path $PSScriptRoot "build-$Script.ps1"

  if (!$Force) {
    foreach ($Output in $Outputs) {
      $OutputPath = Join-Path $BUN_DEPS_OUT_DIR $Output
      if (Test-Path $OutputPath) {
        Write-Host "$Script - already built"
        return
      }
    }
  }
  else {
    # Fix: the outputs live in $BUN_DEPS_OUT_DIR (the same place the
    # Test-Path check above looks). The original removed bare names relative
    # to the current directory, which silently deleted nothing.
    foreach ($Output in $Outputs) {
      Remove-Item (Join-Path $BUN_DEPS_OUT_DIR $Output) -ErrorAction SilentlyContinue
    }
  }

  Write-Host "$Script - Building"
  Push-Location $PSScriptRoot
  try {
    & $ScriptPath
  }
  catch {
    Write-Host "Failed to build $Script"
    throw $_
  }
  finally {
    Pop-Location
  }

  $Script:DidAnything = $true
}

Build-Dependency `
  -Script "boringssl" `
  -Outputs @("crypto.lib", "ssl.lib", "decrepit.lib")
Build-Dependency `
  -Script "cares" `
  -Outputs @("cares.lib")
Build-Dependency `
  -Script "zlib" `
  -Outputs @("zlib.lib")
Build-Dependency `
  -Script "libarchive" `
  -Outputs @("archive.lib")
Build-Dependency `
  -Script "lolhtml" `
  -Outputs @("lolhtml.lib")
Build-Dependency `
  -Script "mimalloc" `
  -Outputs @("mimalloc.lib")
Build-Dependency `
  -Script "tinycc" `
  -Outputs @("tcc.lib")
Build-Dependency `
  -Script "zstd" `
  -Outputs @("zstd.lib")
Build-Dependency `
  -Script "libuv" `
  -Outputs @("libuv.lib")
Build-Dependency `
  -Script "lshpack" `
  -Outputs @("lshpack.lib")

Build-Dependency `
  -Script "libdeflate" `
  -Outputs @("deflate.lib")

if (!($Script:DidAnything)) {
  Write-Host "(run with -Force to rebuild all)"
}
|
||||
109
scripts/all-dependencies.sh
Executable file
109
scripts/all-dependencies.sh
Executable file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env bash
# Build every vendored native dependency, skipping any whose outputs already
# exist in $BUN_DEPS_OUT_DIR (optionally restoring them from a shared cache).
set -euo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

RELEASE="${RELEASE:-0}"
CI="${CI:-}"
BUILT_ANY=0
SUBMODULES=
CACHE_DIR=
CACHE=0
BUN_DEPS_CACHE_DIR="${BUN_DEPS_CACHE_DIR:-}"

if [[ "$CI" ]]; then
  "$(dirname -- "${BASH_SOURCE[0]}")/update-submodules.sh"
fi

FORCE=

while getopts "f" opt; do
  case ${opt} in
    f)
      FORCE=1
      ;;
    \?)
      echo "Usage: all-dependencies.sh [-h] [-f]"
      echo "Options:"
      echo " h Print this help message"
      echo " f Set force to 1"
      exit 1
      ;;
  esac
done

if [ "$RELEASE" == "1" ]; then
  FORCE=1
elif [ -n "$BUN_DEPS_CACHE_DIR" ]; then
  CACHE_DIR="$BUN_DEPS_CACHE_DIR"
  CACHE=1
  SUBMODULES="$(git submodule status)"
fi

# dep <submodule> <script> <lib>...
# Runs scripts/build-<script>.sh unless every <lib> already exists in
# $BUN_DEPS_OUT_DIR (or can be copied out of the cache).
dep() {
  local submodule="$1"
  local script="$2"
  shift 2
  # Fix: the original only shifted inside the !FORCE branch, so when FORCE
  # was set the cache-upload loop below ("${@:2}") treated <script> as its
  # first library name and tried to cp a file that does not exist. Capture
  # the library list once, up front.
  local libs=("$@")
  if [ "$CACHE" == "1" ]; then
    # Cache key: submodule commit hash + platform triple.
    local hash="$(echo "$SUBMODULES" | grep "$submodule" | awk '{print $1}')"
    local os="$(uname -s | tr '[:upper:]' '[:lower:]')"
    local arch="$(uname -m)"
    CACHE_KEY="$submodule/$hash-$os-$arch-$CPU_TARGET"
    mkdir -p "$CACHE_DIR/$CACHE_KEY"
  fi
  if [ -z "$FORCE" ]; then
    HAS_ALL_DEPS=1
    for lib in "${libs[@]}"; do
      if [ ! -f "$BUN_DEPS_OUT_DIR/$lib" ]; then
        if [[ "$CACHE" == "1" && -f "$CACHE_DIR/$CACHE_KEY/$lib" && "$script" != "libarchive" ]]; then
          mkdir -p "$BUN_DEPS_OUT_DIR"
          cp "$CACHE_DIR/$CACHE_KEY/$lib" "$BUN_DEPS_OUT_DIR/$lib"
          printf "%s %s - already cached\n" "$script" "$lib"
        else
          HAS_ALL_DEPS=0
          break
        fi
      fi
    done
    if [ "$HAS_ALL_DEPS" == "1" ]; then
      printf "%s - already built\n" "$script"
      return
    fi
  fi
  printf "building %s\n" "$script"

  # Run the build without -e so we can report which script failed.
  set +e
  bash "$SCRIPT_DIR/build-$script.sh"
  EXIT=$?
  set -e

  if [ "$EXIT" -ne 0 ]; then
    printf "Failed to build %s\n" "$script"
    exit "$EXIT"
  fi

  if [ "$CACHE" == "1" ]; then
    mkdir -p "$CACHE_DIR/$CACHE_KEY"
    for lib in "${libs[@]}"; do
      cp "$BUN_DEPS_OUT_DIR/$lib" "$CACHE_DIR/$CACHE_KEY/$lib"
      printf "%s %s - cached\n" "$script" "$lib"
    done
  fi

  BUILT_ANY=1
}

dep boringssl boringssl libcrypto.a libssl.a libdecrepit.a
dep c-ares cares libcares.a
dep zlib zlib libz.a # Zlib must come before libarchive.
dep libarchive libarchive libarchive.a
dep lol-html lolhtml liblolhtml.a
dep mimalloc mimalloc-debug libmimalloc-debug.a libmimalloc-debug.o
dep mimalloc mimalloc libmimalloc.a libmimalloc.o
dep tinycc tinycc libtcc.a
dep zstd zstd libzstd.a
dep libdeflate libdeflate libdeflate.a
dep ls-hpack lshpack liblshpack.a

if [ "$BUILT_ANY" -eq 0 ]; then
  printf "(run with -f to rebuild)\n"
fi
|
||||
16
scripts/build-boringssl.ps1
Executable file
16
scripts/build-boringssl.ps1
Executable file
@@ -0,0 +1,16 @@
|
||||
# Build BoringSSL's static crypto/ssl/decrepit libraries for Windows and
# stage them in $BUN_DEPS_OUT_DIR.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'boringssl')
try {
  # Always start from a clean build tree.
  Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build
  Set-Location (mkdir -Force build)

  Run cmake @CMAKE_FLAGS ..
  Run cmake --build . --target crypto --target ssl --target decrepit --clean-first --config Release

  Copy-Item crypto/crypto.lib $BUN_DEPS_OUT_DIR
  Copy-Item ssl/ssl.lib $BUN_DEPS_OUT_DIR
  Copy-Item decrepit/decrepit.lib $BUN_DEPS_OUT_DIR
  Write-Host "-> crypto.lib, ssl.lib, decrepit.lib"
} finally { Pop-Location }
|
||||
14
scripts/build-boringssl.sh
Executable file
14
scripts/build-boringssl.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
# Build BoringSSL's static libraries and stage them in $BUN_DEPS_OUT_DIR.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

cd "$BUN_DEPS_DIR/boringssl"
mkdir -p build
cd build

cmake "${CMAKE_FLAGS[@]}" -GNinja ..
ninja libcrypto.a libssl.a libdecrepit.a

# NOTE(review): globstar is not enabled, so `**` matches exactly one path
# segment (same as `*`) — presumably the archives land one level deep
# (crypto/, ssl/, decrepit/); confirm if the build layout changes.
cp **/libcrypto.a "$BUN_DEPS_OUT_DIR/libcrypto.a"
cp **/libssl.a "$BUN_DEPS_OUT_DIR/libssl.a"
cp **/libdecrepit.a "$BUN_DEPS_OUT_DIR/libdecrepit.a"
|
||||
29
scripts/build-bun-cpp.ps1
Executable file
29
scripts/build-bun-cpp.ps1
Executable file
@@ -0,0 +1,29 @@
|
||||
# Compile Bun's C++ sources only (BUN_CPP_ONLY) and, on Buildkite, split the
# resulting archive into chunks for upload.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`

. (Join-Path $PSScriptRoot "env.ps1")
if ($env:CI -eq "true") {
  $env:FORCE_UPDATE_SUBMODULES = "1"
  & (Join-Path $PSScriptRoot "update-submodules.ps1")
  & (Join-Path $PSScriptRoot "build-libuv.ps1") -CloneOnly $True
}

cd build
cmake .. @CMAKE_FLAGS `
  -G Ninja `
  -DCMAKE_BUILD_TYPE=Release `
  -DNO_CODEGEN=0 `
  -DNO_CONFIGURE_DEPENDS=1 `
  -DBUN_CPP_ONLY=1
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }

.\compile-cpp-only.ps1 -v -j $env:CPUS
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }

# HACK: For some reason, the buildkite agent is hanging when uploading bun-cpp-objects.a
# Best guess is that there is an issue when uploading files larger than 500 MB
#
# For now, use FileSplitter to split the file into smaller chunks:
# https://www.powershellgallery.com/packages/FileSplitter/1.3
if ($env:BUILDKITE) {
  Split-File -Path (Resolve-Path "bun-cpp-objects.a") -PartSizeBytes "50MB" -Verbose
}
|
||||
16
scripts/build-cares.ps1
Executable file
16
scripts/build-cares.ps1
Executable file
@@ -0,0 +1,16 @@
|
||||
# Build c-ares as a static library for Windows -> cares.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'c-ares')
try {
  Set-Location (mkdir -Force build)

  Run cmake @CMAKE_FLAGS -DCARES_STATIC=ON -DCARES_SHARED=OFF ..
  Run cmake --build . --clean-first --config Release

  Copy-Item lib\cares.lib $BUN_DEPS_OUT_DIR
  Write-Host "-> cares.lib"
}
finally {
  Pop-Location
}
|
||||
24
scripts/build-cares.sh
Executable file
24
scripts/build-cares.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
# Build the c-ares static library and stage it in $BUN_DEPS_OUT_DIR.
set -exo pipefail

export FORCE_PIC=1
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

cd "$BUN_DEPS_DIR/c-ares"

# Clean any stale CMake state, then configure out-of-tree.
rm -rf build CMakeCache.txt CMakeFiles
mkdir -p build

cd build

cmake "${CMAKE_FLAGS[@]}" .. \
  -DCMAKE_INSTALL_LIBDIR=lib \
  -DCARES_STATIC=ON \
  -DCARES_STATIC_PIC=OFF \
  -DCARES_SHARED=OFF \
  -DCARES_BUILD_TOOLS=ON \
  -G "Ninja"

ninja

cp lib/libcares.a "$BUN_DEPS_OUT_DIR/libcares.a"
|
||||
17
scripts/build-libarchive.ps1
Executable file
17
scripts/build-libarchive.ps1
Executable file
@@ -0,0 +1,17 @@
|
||||
# Build libarchive (static, all optional codecs disabled) -> archive.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'libarchive')
try {
  # libarchive ships a "build" directory; use a separate build tree.
  Remove-Item -Recurse -Force libarchive-build -ErrorAction SilentlyContinue
  Set-Location (mkdir -Force libarchive-build)

  Run cmake @CMAKE_FLAGS -DBUILD_SHARED_LIBS=0 -DENABLE_BZIP2=0 -DENABLE_CAT=0 -DENABLE_EXPAT=0 -DENABLE_ICONV=0 -DENABLE_INSTALL=0 -DENABLE_LIBB2=0 -DENABLE_LibGCC=0 -DENABLE_LIBXML2=0 -DENABLE_LZ4=0 -DENABLE_LZMA=0 -DENABLE_LZO=0 -DENABLE_MBEDTLS=0 -DENABLE_NETTLE=0 -DENABLE_OPENSSL=0 -DENABLE_PCRE2POSIX=0 -DENABLE_PCREPOSIX=0 -DENABLE_TEST=0 -DENABLE_WERROR=0 -DENABLE_ZLIB=0 -DENABLE_ZSTD=0 -DHAVE_ZLIB_H=1 ..
  Run cmake --build . --clean-first --config Release --verbose --target archive_static

  Copy-Item libarchive\archive.lib $BUN_DEPS_OUT_DIR\archive.lib
  Write-Host "-> archive.lib"
}
finally {
  Pop-Location
}
|
||||
40
scripts/build-libarchive.sh
Executable file
40
scripts/build-libarchive.sh
Executable file
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env bash
# Build libarchive as a static library (all optional codecs disabled) and
# stage it in $BUN_DEPS_OUT_DIR.
set -exo pipefail
export FORCE_PIC=1
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

mkdir -p "$BUN_DEPS_OUT_DIR"

cd "$BUN_DEPS_DIR/libarchive"
# Libarchive has a "build" folder which we must not use
rm -rf libarchive-build
mkdir -p libarchive-build
cd libarchive-build
# Fix: every sibling build script expands CMAKE_FLAGS as an array
# ("${CMAKE_FLAGS[@]}"); the unquoted scalar $CMAKE_FLAGS used here would
# only pass the array's first element.
cmake "${CMAKE_FLAGS[@]}" \
  -DBUILD_SHARED_LIBS=0 \
  -DENABLE_BZIP2=0 \
  -DENABLE_CAT=0 \
  -DENABLE_EXPAT=0 \
  -DENABLE_ICONV=0 \
  -DENABLE_INSTALL=0 \
  -DENABLE_LIBB2=0 \
  -DENABLE_LibGCC=0 \
  -DENABLE_LIBXML2=0 \
  -DENABLE_LZ4=0 \
  -DENABLE_LZMA=0 \
  -DENABLE_LZO=0 \
  -DENABLE_MBEDTLS=0 \
  -DENABLE_NETTLE=0 \
  -DENABLE_OPENSSL=0 \
  -DENABLE_PCRE2POSIX=0 \
  -DENABLE_PCREPOSIX=0 \
  -DENABLE_TEST=0 \
  -DENABLE_WERROR=0 \
  -DENABLE_ZLIB=0 \
  -DENABLE_ZSTD=0 \
  -DHAVE_ZLIB_H=1 \
  -GNinja \
  -B . -S ..
cmake --build . --target libarchive.a --config Release -- -j"$CPUS"

cp ./libarchive/libarchive.a "$BUN_DEPS_OUT_DIR/libarchive.a"
|
||||
16
scripts/build-libdeflate.ps1
Normal file
16
scripts/build-libdeflate.ps1
Normal file
@@ -0,0 +1,16 @@
|
||||
# Build libdeflate's static library for Windows -> deflate.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'libdeflate')
try {
  Remove-Item CMakeCache.txt, CMakeFiles, build -Recurse -ErrorAction SilentlyContinue
  mkdir -Force build

  Run cmake -S "." -B build @CMAKE_FLAGS -DLIBDEFLATE_BUILD_STATIC_LIB=ON -DLIBDEFLATE_BUILD_SHARED_LIB=OFF -DLIBDEFLATE_BUILD_GZIP=OFF
  Run cmake --build build --clean-first --config Release

  # In https://github.com/ebiggers/libdeflate/releases/tag/v1.20, it's outputting libdeflate.a even on Windows
  Copy-Item build/deflatestatic.lib $BUN_DEPS_OUT_DIR/deflate.lib
  Write-Host "-> deflate.lib"
} finally { Pop-Location }
|
||||
|
||||
10
scripts/build-libdeflate.sh
Executable file
10
scripts/build-libdeflate.sh
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env bash
# Build libdeflate's static library and stage it in $BUN_DEPS_OUT_DIR.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

mkdir -p "$BUN_DEPS_OUT_DIR"
cd "$BUN_DEPS_DIR/libdeflate"
rm -rf build CMakeCache.txt CMakeFiles
cmake "${CMAKE_FLAGS[@]}" -DLIBDEFLATE_BUILD_STATIC_LIB=ON -DLIBDEFLATE_BUILD_SHARED_LIB=OFF -DLIBDEFLATE_BUILD_GZIP=OFF -B build -S . -G Ninja
ninja libdeflate.a -C build
cp build/libdeflate.a "$BUN_DEPS_OUT_DIR/libdeflate.a"
|
||||
46
scripts/build-libuv.ps1
Executable file
46
scripts/build-libuv.ps1
Executable file
@@ -0,0 +1,46 @@
|
||||
param(
  [bool] $CloneOnly = $false
)

# Clone (and optionally build) the pinned libuv revision used on Windows.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")
$CWD = Get-Location

$Source = (Join-Path $PSScriptRoot "../src/deps/libuv")
$Commit = "da527d8d2a908b824def74382761566371439003"

if (!(Test-Path -PathType Container $Source)) {
  Write-Host "Cloning libuv: $Commit"
  New-Item -ItemType Directory -Force -Path $Source
  Push-Location $Source
  try {
    # Shallow-fetch just the pinned commit instead of a full clone.
    Run git init
    Run git remote add origin "https://github.com/libuv/libuv"
    Run git fetch --depth 1 origin $Commit
    Run git checkout FETCH_HEAD
  } finally { Pop-Location }
} else {
  Push-Location $Source
  try {
    # Already cloned: only fetch when the checkout drifted from the pin.
    $CurrentCommit = git rev-parse HEAD
    if ($CurrentCommit -ne $Commit) {
      Write-Host "Updating libuv: $Commit"
      Run git fetch --depth 1 origin $Commit
      Run git checkout FETCH_HEAD
    }
  } finally { Pop-Location }
}

if(!($CloneOnly)) {
  Push-Location $Source
  try {
    $null = mkdir build -ErrorAction SilentlyContinue
    Set-Location build

    Run cmake .. @CMAKE_FLAGS "-DCMAKE_C_FLAGS=/DWIN32 /D_WINDOWS -Wno-int-conversion"
    Run cmake --build . --clean-first --config Release

    Copy-Item libuv.lib $BUN_DEPS_OUT_DIR
    Write-Host "-> libuv.lib"
  } finally { Pop-Location }
}
|
||||
11
scripts/build-lolhtml.ps1
Executable file
11
scripts/build-lolhtml.ps1
Executable file
@@ -0,0 +1,11 @@
|
||||
# Build lol-html's C API with cargo and stage lolhtml.lib (and its pdb).
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'lol-html/c-api')
try {
  Run cargo build --release --target x86_64-pc-windows-msvc

  Copy-Item target/x86_64-pc-windows-msvc/release/lolhtml.lib $BUN_DEPS_OUT_DIR
  Copy-Item target/x86_64-pc-windows-msvc/release/lolhtml.pdb $BUN_DEPS_OUT_DIR
  Write-Host "-> lolhtml.lib"
} finally { Pop-Location }
|
||||
7
scripts/build-lolhtml.sh
Executable file
7
scripts/build-lolhtml.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
# Build lol-html's C API with cargo and stage liblolhtml.a.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

cd "$BUN_DEPS_DIR/lol-html/c-api"
cargo build --release
cp target/release/liblolhtml.a "$BUN_DEPS_OUT_DIR/liblolhtml.a"
|
||||
18
scripts/build-lshpack.ps1
Executable file
18
scripts/build-lshpack.ps1
Executable file
@@ -0,0 +1,18 @@
|
||||
# Build ls-hpack as a static library for Windows -> lshpack.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'ls-hpack')
try {
  Set-Location (mkdir -Force build)

  Run cmake .. @CMAKE_FLAGS `
    -DCMAKE_BUILD_TYPE=Release `
    -DLSHPACK_XXH=ON `
    -DSHARED=0

  Run cmake --build . --clean-first --config Release

  # The project names its archive ls-hpack.lib; Bun expects lshpack.lib.
  Copy-Item ls-hpack.lib $BUN_DEPS_OUT_DIR/lshpack.lib

  Write-Host "-> lshpack.lib"
} finally { Pop-Location }
|
||||
20
scripts/build-lshpack.sh
Executable file
20
scripts/build-lshpack.sh
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Build ls-hpack as a static library and stage it as liblshpack.a.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

mkdir -p "$BUN_DEPS_OUT_DIR"

cd "$BUN_DEPS_DIR/ls-hpack"

# Fix: the original also ran `rm -rf CMakeFiles CMakeCache build.ninja`
# *before* the cd above, deleting CMake state from whatever directory the
# caller happened to be in. Clean only inside the ls-hpack tree.
rm -rf CMakeCache* CMakeFiles build.ninja

cmake "${CMAKE_FLAGS[@]}" . \
  -DCMAKE_BUILD_TYPE=Release \
  -DLSHPACK_XXH=ON \
  -DSHARED=0 \
  -GNinja

ninja libls-hpack.a

cp ./libls-hpack.a "$BUN_DEPS_OUT_DIR/liblshpack.a"
|
||||
43
scripts/build-mimalloc-debug.sh
Executable file
43
scripts/build-mimalloc-debug.sh
Executable file
@@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env bash
# Build a debug (valgrind-tracked) mimalloc static library plus the
# single-object build, staged as libmimalloc-debug.{a,o}.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}
MIMALLOC_VALGRIND_ENABLED_FLAG=${MIMALLOC_VALGRIND_ENABLED_FLAG:-}

cd "$BUN_DEPS_DIR/mimalloc"

rm -rf CMakeCache* CMakeFiles build

mkdir build

cd build

cmake "${CMAKE_FLAGS[@]}" .. \
  -DCMAKE_BUILD_TYPE=Debug \
  -DMI_DEBUG_FULL=1 \
  -DMI_SKIP_COLLECT_ON_EXIT=1 \
  -DMI_BUILD_SHARED=OFF \
  -DMI_BUILD_STATIC=ON \
  -DMI_BUILD_TESTS=OFF \
  -DMI_OSX_ZONE=OFF \
  -DMI_OSX_INTERPOSE=OFF \
  -DMI_BUILD_OBJECT=ON \
  -DMI_OVERRIDE=OFF \
  -DMI_TRACK_VALGRIND=ON \
  -DMI_USE_CXX=ON \
  -GNinja

ninja

# The archive's name depends on whether valgrind tracking was compiled in.
if [ -f libmimalloc-valgrind-debug.a ]; then
  file="libmimalloc-valgrind-debug.a"
elif [ -f libmimalloc-debug.a ]; then
  file="libmimalloc-debug.a"
else
  echo "Could not find libmimalloc-valgrind-debug.a or libmimalloc-debug.a"
  exit 1
fi

cp "$file" "$BUN_DEPS_OUT_DIR/libmimalloc-debug.a"
cp CMakeFiles/mimalloc-obj.dir/src/static.c.o "$BUN_DEPS_OUT_DIR/libmimalloc-debug.o"
|
||||
26
scripts/build-mimalloc.ps1
Executable file
26
scripts/build-mimalloc.ps1
Executable file
@@ -0,0 +1,26 @@
|
||||
# Build mimalloc's release static library for Windows -> mimalloc.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'mimalloc')
try {
  Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build
  Set-Location (mkdir -Force build)

  # Fix: -DMI_OSX_ZONE=OFF was passed twice; keep a single occurrence.
  Run cmake .. @CMAKE_FLAGS `
    -DMI_SKIP_COLLECT_ON_EXIT=1 `
    -DMI_BUILD_SHARED=OFF `
    -DMI_BUILD_STATIC=ON `
    -DMI_BUILD_TESTS=OFF `
    -DMI_OSX_ZONE=OFF `
    -DMI_OSX_INTERPOSE=OFF `
    -DMI_BUILD_OBJECT=ON `
    -DMI_USE_CXX=ON `
    -DMI_OVERRIDE=OFF

  Run cmake --build . --clean-first --config Release

  Copy-Item mimalloc-static.lib $BUN_DEPS_OUT_DIR/mimalloc.lib

  Write-Host "-> mimalloc.lib"
} finally { Pop-Location }
|
||||
29
scripts/build-mimalloc.sh
Executable file
29
scripts/build-mimalloc.sh
Executable file
@@ -0,0 +1,29 @@
|
||||
#!/usr/bin/env bash
# Build mimalloc's release static library and single-object build, staged as
# libmimalloc.{a,o}.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

MIMALLOC_OVERRIDE_FLAG=${MIMALLOC_OVERRIDE_FLAG:-}
MIMALLOC_VALGRIND_ENABLED_FLAG=${MIMALLOC_VALGRIND_ENABLED_FLAG:-}

cd "$BUN_DEPS_DIR/mimalloc"

rm -rf CMakeCache* CMakeFiles

# Fix: -DMI_OSX_ZONE=OFF was passed twice; keep a single occurrence.
cmake "${CMAKE_FLAGS[@]}" . \
  -DCMAKE_BUILD_TYPE=Release \
  -DMI_SKIP_COLLECT_ON_EXIT=1 \
  -DMI_BUILD_SHARED=OFF \
  -DMI_BUILD_STATIC=ON \
  -DMI_BUILD_TESTS=OFF \
  -DMI_OSX_ZONE=OFF \
  -DMI_OSX_INTERPOSE=OFF \
  -DMI_BUILD_OBJECT=ON \
  -DMI_USE_CXX=ON \
  -DMI_OVERRIDE=OFF \
  -GNinja

ninja

cp libmimalloc.a "$BUN_DEPS_OUT_DIR/libmimalloc.a"
cp CMakeFiles/mimalloc-obj.dir/src/static.c.o "$BUN_DEPS_OUT_DIR/libmimalloc.o"
|
||||
29
scripts/build-tinycc.ps1
Executable file
29
scripts/build-tinycc.ps1
Executable file
@@ -0,0 +1,29 @@
|
||||
# Build TinyCC as a static library for Windows -> tcc.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'tinycc')
try {
  cd win32
  Run .\build-tcc.bat -clean
  cd ..

  Set-Content -Path config.h -Value @"
#define TCC_VERSION "$(Get-Content VERSION)"
#define TCC_GITHASH "$(git rev-parse --short HEAD)"
#define CONFIG_TCCDIR "$((Get-Location).Path.Replace('\', '/'))"
#define CONFIG_TCC_PREDEFS 1
#ifdef TCC_TARGET_X86_64
#define CONFIG_TCC_CROSSPREFIX "$PX%-"
#endif
"@

  Run clang-cl -DTCC_TARGET_PE -DTCC_TARGET_X86_64 config.h -DC2STR -o c2str.exe conftest.c
  Run .\c2str.exe .\include\tccdefs.h tccdefs_.h

  Run clang-cl @($env:CFLAGS -split ' ') libtcc.c -o tcc.obj "-DTCC_TARGET_PE" "-DTCC_TARGET_X86_64" "-O2" "-W2" "-Zi" "-MD" "-GS-" "-c" "-MT"
  Run llvm-lib "tcc.obj" "-OUT:tcc.lib"

  # Fix: ship the archive produced by llvm-lib above; the original copied
  # the raw object file (tcc.obj) under the .lib name.
  Copy-Item tcc.lib $BUN_DEPS_OUT_DIR/tcc.lib

  Write-Host "-> tcc.lib"
} finally { Pop-Location }
|
||||
11
scripts/build-tinycc.sh
Executable file
11
scripts/build-tinycc.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Build TinyCC's static libtcc.a and stage it in $BUN_DEPS_OUT_DIR.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

mkdir -p "$BUN_DEPS_OUT_DIR"

cd "$BUN_DEPS_DIR/tinycc"
make clean
# NOTE(review): TCC_LIBTCC1 is forced to an escaped empty-ish string —
# presumably to stop tcc from loading libtcc1.a at runtime; confirm upstream.
CFLAGS="${CFLAGS} -DTCC_LIBTCC1=\\\"\0\\\"" ./configure --enable-static --cc="$CC" --ar="$AR" --config-predefs=yes
make libtcc.a -j$CPUS
cp libtcc.a "$BUN_DEPS_OUT_DIR"
|
||||
29
scripts/build-zlib.ps1
Executable file
29
scripts/build-zlib.ps1
Executable file
@@ -0,0 +1,29 @@
|
||||
# Patch zlib's deflate.h for clang-cl, then build and stage zlib.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'zlib')
try {
  # Discard any previous local patch before re-applying it.
  Run git reset --hard

  # TODO: make a patch upstream to change the line
  # `#ifdef _MSC_VER`
  # to account for clang-cl, which implements `__builtin_ctzl` and `__builtin_expect`
  $textToReplace = [regex]::Escape("int __inline __builtin_ctzl(unsigned long mask)") + "[^}]*}"
  $fileContent = Get-Content "deflate.h" -Raw
  if ($fileContent -match $textToReplace) {
    Set-Content -Path "deflate.h" -Value ($fileContent -replace $textToReplace, "")
  }
  else {
    throw "Failed to patch deflate.h"
  }

  Set-Location (mkdir -Force build)

  Run cmake .. @CMAKE_FLAGS
  Run cmake --build . --clean-first --config Release

  Copy-Item zlib.lib $BUN_DEPS_OUT_DIR

  Write-Host "-> zlib.lib"
}
finally { Pop-Location }
|
||||
12
scripts/build-zlib.sh
Executable file
12
scripts/build-zlib.sh
Executable file
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
# Build zlib's static library and stage it in $BUN_DEPS_OUT_DIR.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

mkdir -p "$BUN_DEPS_OUT_DIR"
cd "$BUN_DEPS_DIR/zlib"
rm -rf build
mkdir build
cd build
# Fix: every sibling build script expands CMAKE_FLAGS as an array
# ("${CMAKE_FLAGS[@]}"); the unquoted scalar $CMAKE_FLAGS used here would
# only pass the array's first element.
cmake "${CMAKE_FLAGS[@]}" -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja
cp ./libz.a "$BUN_DEPS_OUT_DIR/libz.a"
|
||||
14
scripts/build-zstd.ps1
Executable file
14
scripts/build-zstd.ps1
Executable file
@@ -0,0 +1,14 @@
|
||||
# Build zstd's static library for Windows -> zstd.lib.
$ErrorActionPreference = 'Stop' # behave like bash's `set -euo pipefail`
. (Join-Path $PSScriptRoot "env.ps1")

Push-Location (Join-Path $BUN_DEPS_DIR 'zstd')
try {
  Remove-Item CMakeCache.txt, CMakeFiles -Recurse -ErrorAction SilentlyContinue

  # CL_SHOWINCLUDES_PREFIX is workaround for cmake bug in 3.28. .ninja_deps still needs to be deleted. Bug is fixed in 3.30
  Run cmake -S "build/cmake" @CMAKE_FLAGS -DZSTD_BUILD_STATIC=ON -DCMAKE_CL_SHOWINCLUDES_PREFIX="Note: including file:"
  Run cmake --build . --clean-first --config Release

  Copy-Item lib/zstd_static.lib $BUN_DEPS_OUT_DIR/zstd.lib
  Write-Host "-> zstd.lib"
} finally { Pop-Location }
|
||||
11
scripts/build-zstd.sh
Executable file
11
scripts/build-zstd.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Build zstd's static library and stage it in $BUN_DEPS_OUT_DIR.
set -exo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"

mkdir -p "$BUN_DEPS_OUT_DIR"

cd "$BUN_DEPS_DIR/zstd"
rm -rf Release CMakeCache.txt CMakeFiles
cmake "${CMAKE_FLAGS[@]}" -DZSTD_BUILD_STATIC=ON -B Release -S build/cmake -G Ninja
ninja libzstd_static -C Release
cp Release/lib/libzstd.a "$BUN_DEPS_OUT_DIR/libzstd.a"
|
||||
@@ -1,251 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { spawn as nodeSpawn } from "node:child_process";
import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync, readdirSync } from "node:fs";
import { join, relative, resolve } from "node:path";
|
||||
|
||||
// Recognized flags for cmake's generate step ([flag, value type, description]).
// https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem
const generateFlags = [
  ["-S", "string", "path to source directory"],
  ["-B", "string", "path to build directory"],
  ["-D", "string", "define a build option (e.g. -DCMAKE_BUILD_TYPE=Release)"],
  ["-G", "string", "build generator (e.g. -GNinja)"],
  ["-W", "string", "enable warnings (e.g. -Wno-dev)"],
  ["--fresh", "boolean", "force a fresh build"],
  ["--log-level", "string", "set the log level"],
  ["--debug-output", "boolean", "print debug output"],
  ["--toolchain", "string", "the toolchain to use"],
];

// Recognized flags for cmake's build step.
// https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem
const buildFlags = [
  ["--config", "string", "build configuration (e.g. --config Release)"],
  ["--target", "string", "build target"],
  ["-t", "string", "same as --target"],
  ["--parallel", "number", "number of parallel jobs"],
  ["-j", "number", "same as --parallel"],
  ["--verbose", "boolean", "enable verbose output"],
  ["-v", "boolean", "same as --verbose"],
];
|
||||
|
||||
// Run cmake's generate and build steps for the given CLI args, re-entering
// through the Visual Studio developer shell on Windows and wiring up the
// optional Buildkite build cache.
async function build(args) {
  // cmake needs the MSVC environment on Windows; re-run this script inside
  // the VS developer shell when it is not already active.
  if (process.platform === "win32" && !process.env["VSINSTALLDIR"]) {
    const shellPath = join(import.meta.dirname, "vs-shell.ps1");
    const scriptPath = import.meta.filename;
    return spawn("pwsh", ["-NoProfile", "-NoLogo", "-File", shellPath, process.argv0, scriptPath, ...args]);
  }

  const env = {
    ...process.env,
    FORCE_COLOR: "1",
    CLICOLOR_FORCE: "1",
  };

  const generateOptions = parseOptions(args, generateFlags);
  const buildOptions = parseOptions(args, buildFlags);

  // Both steps must agree on the same build directory.
  const buildPath = resolve(generateOptions["-B"] || buildOptions["--build"] || "build");
  generateOptions["-B"] = buildPath;
  buildOptions["--build"] = buildPath;

  if (!generateOptions["-S"]) {
    generateOptions["-S"] = process.cwd();
  }

  const cacheRead = isCacheReadEnabled();
  const cacheWrite = isCacheWriteEnabled();
  if (cacheRead || cacheWrite) {
    const cachePath = getCachePath();
    // Seed a branch's cache from the default branch's cache on first use.
    if (cacheRead && !existsSync(cachePath)) {
      const mainCachePath = getCachePath(getDefaultBranch());
      if (existsSync(mainCachePath)) {
        mkdirSync(cachePath, { recursive: true });
        try {
          cpSync(mainCachePath, cachePath, { recursive: true, force: true });
        } catch (err) {
          switch (err?.code) {
            case "EPERM":
            case "EACCES":
              // Best-effort: loosen permissions once and retry the copy.
              try {
                chmodSync(mainCachePath, 0o777);
              } catch (e2) {}

              cpSync(mainCachePath, cachePath, { recursive: true, force: true });
              break;
            default:
              throw err;
          }
        }
      }
    }
    generateOptions["-DCACHE_PATH"] = cmakePath(cachePath);
    generateOptions["--fresh"] = undefined;
    if (cacheRead && cacheWrite) {
      generateOptions["-DCACHE_STRATEGY"] = "read-write";
    } else if (cacheRead) {
      generateOptions["-DCACHE_STRATEGY"] = "read-only";
    } else if (cacheWrite) {
      generateOptions["-DCACHE_STRATEGY"] = "write-only";
    }
  }

  // Map a bare toolchain name to its cmake/toolchains/<name>.cmake file.
  const toolchain = generateOptions["--toolchain"];
  if (toolchain) {
    const toolchainPath = resolve(import.meta.dirname, "..", "cmake", "toolchains", `${toolchain}.cmake`);
    generateOptions["--toolchain"] = toolchainPath;
  }

  // -D options are passed as a single "-DKEY=value" token; everything else
  // as "flag value" pairs (undefined values are dropped inside spawn()).
  const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
    flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value],
  );
  await spawn("cmake", generateArgs, { env });

  // The generate step may emit a .env file with extra vars for the build step.
  const envPath = resolve(buildPath, ".env");
  if (existsSync(envPath)) {
    const envFile = readFileSync(envPath, "utf8");
    for (const line of envFile.split(/\r\n|\n|\r/)) {
      const [key, value] = line.split("=");
      env[key] = value;
    }
  }

  // "--build <dir>" must come first on the cmake command line.
  const buildArgs = Object.entries(buildOptions)
    .sort(([a], [b]) => (a === "--build" ? -1 : a.localeCompare(b)))
    .flatMap(([flag, value]) => [flag, value]);
  await spawn("cmake", buildArgs, { env });

  // Collect interesting build outputs (logs, compile DB) from the build dir
  // and its immediate subdirectories.
  const buildFiles = ["ccache.log", "compile_commands.json"];
  const buildPaths = [buildPath, ...readdirSync(buildPath).map(path => join(buildPath, path))];
  const buildArtifacts = [];
  for (const dir of buildPaths) {
    for (const buildFile of buildFiles) {
      const path = join(dir, buildFile);
      if (existsSync(path)) {
        buildArtifacts.push(path);
      }
    }
  }

  if (isBuildkite()) {
    await Promise.all(buildArtifacts.map(path => spawn("buildkite-agent", ["artifact", "upload", relative(buildPath, path)], { cwd: buildPath, env })));
  }
}
|
||||
|
||||
// Convert a native path to the forward-slash form cmake expects everywhere.
function cmakePath(path) {
  return path.replace(/\\/g, "/");
}
|
||||
|
||||
// Compute the per-repository / per-branch / per-step cache directory used on
// Buildkite. Non-alphanumeric characters are flattened to "-" so the pieces
// are filesystem-safe.
function getCachePath(branch) {
  const buildPath = process.env.BUILDKITE_BUILD_PATH;
  const repository = process.env.BUILDKITE_REPO;
  const fork = process.env.BUILDKITE_PULL_REQUEST_REPO;
  const repositoryKey = (fork || repository).replace(/[^a-z0-9]/gi, "-");
  const branchKey = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
  const stepKey = process.env.BUILDKITE_STEP_KEY.replace(/[^a-z0-9]/gi, "-");
  return resolve(buildPath, "..", "cache", repositoryKey, branchKey, stepKey);
}
|
||||
|
||||
/**
 * Whether a warm cache may be restored for this build.
 * Reads are disabled outside Buildkite, on clean checkouts, and on the
 * pipeline's default branch (which always builds from scratch).
 * @returns {boolean}
 */
function isCacheReadEnabled() {
  if (!isBuildkite()) {
    return false;
  }
  if (process.env.BUILDKITE_CLEAN_CHECKOUT === "true") {
    return false;
  }
  return process.env.BUILDKITE_BRANCH !== getDefaultBranch();
}
|
||||
|
||||
/**
 * Whether build outputs should be written back to the cache.
 * Writes are only allowed while running under Buildkite CI.
 * @returns {boolean}
 */
function isCacheWriteEnabled() {
  if (isBuildkite()) {
    return true;
  }
  return false;
}
|
||||
|
||||
/**
 * True when the script is executing under the Buildkite CI agent
 * (detected via the BUILDKITE environment variable).
 * @returns {boolean}
 */
function isBuildkite() {
  const { BUILDKITE } = process.env;
  return BUILDKITE === "true";
}
|
||||
|
||||
/**
 * Default branch of the Buildkite pipeline, falling back to "main"
 * when BUILDKITE_PIPELINE_DEFAULT_BRANCH is unset or empty.
 * @returns {string}
 */
function getDefaultBranch() {
  const pipelineDefault = process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH;
  return pipelineDefault ? pipelineDefault : "main";
}
|
||||
|
||||
/**
 * Parses command-line arguments against a list of known flags.
 * Supported shapes per flag:
 *   exact match  : "--flag" (boolean → key with undefined value)
 *                  "--flag value" (non-boolean → consumes the next argument)
 *   prefix match : "--flag=value" and "--flagvalue"
 * Unknown arguments are ignored.
 * @param {string[]} args - raw argv tokens
 * @param {[string, string][]} [flags] - [flagName, type] pairs; type "boolean"
 *   flags take no value.
 * @returns {Record<string, string | undefined>} parsed flag → value map
 */
function parseOptions(args, flags = []) {
  const parsed = {};
  let index = 0;
  while (index < args.length) {
    const token = args[index];
    for (const [flag, type] of flags) {
      if (token === flag) {
        // Boolean flags are recorded with no value; others consume the
        // following token as their value.
        parsed[token] = type === "boolean" ? undefined : args[++index];
      } else if (token.startsWith(flag)) {
        const eq = token.indexOf("=");
        if (eq < 0) {
          // "--flagvalue" form: everything after the flag name is the value.
          parsed[flag] = token.slice(flag.length);
        } else {
          // "--flag=value" form: split on the first "=".
          parsed[token.slice(0, eq)] = token.slice(eq + 1);
        }
      }
    }
    index++;
  }
  return parsed;
}
|
||||
|
||||
/**
 * Runs a child process, streaming its stdout/stderr to this process, and
 * logs the command line plus elapsed time.
 *
 * On success (exit code 0) it returns normally. On any failure — spawn
 * error, non-zero exit, or death by signal — it logs a diagnostic and
 * terminates THIS process via process.exit (it never throws).
 *
 * @param {string} command - executable to run
 * @param {(string | undefined | null | false)[]} args - arguments; falsy
 *   entries are dropped, which lets callers build argv conditionally.
 * @param {object} [options] - extra options forwarded to child_process.spawn
 *   (may override stdio, cwd, env, ...).
 */
async function spawn(command, args, options) {
  // Drop falsy argv entries so callers can write `cond && "--flag"`.
  const effectiveArgs = args.filter(Boolean);
  // Quote arguments containing spaces so the logged line is copy-pastable.
  const description = [command, ...effectiveArgs].map(arg => (arg.includes(" ") ? JSON.stringify(arg) : arg)).join(" ");
  console.log("$", description);

  // stdio defaults to "pipe" so the streams can be relayed below; callers
  // may override via `options`.
  const subprocess = nodeSpawn(command, effectiveArgs, {
    stdio: "pipe",
    ...options,
  });

  // Timing starts when the OS actually spawns the process, not when we ask.
  let timestamp;
  subprocess.on("spawn", () => {
    timestamp = Date.now();
  });

  // Relay child output to our own streams; each promise resolves when the
  // corresponding stream ends.
  const stdout = new Promise(resolve => {
    subprocess.stdout.on("end", resolve);
    subprocess.stdout.on("data", data => process.stdout.write(data));
  });

  const stderr = new Promise(resolve => {
    subprocess.stderr.on("end", resolve);
    subprocess.stderr.on("data", data => process.stderr.write(data));
  });

  const done = Promise.all([stdout, stderr]);

  // Resolve on whichever comes first: a spawn-level error or process exit.
  const { error, exitCode, signalCode } = await new Promise(resolve => {
    subprocess.on("error", error => resolve({ error }));
    subprocess.on("exit", (exitCode, signalCode) => resolve({ exitCode, signalCode }));
  });

  // Drain remaining buffered output before reporting.
  await done;

  const duration = Date.now() - timestamp;
  if (duration > 60000) {
    console.log(`Took ${(duration / 60000).toFixed(2)} minutes`);
  } else {
    console.log(`Took ${(duration / 1000).toFixed(2)} seconds`);
  }

  if (exitCode === 0) {
    return;
  }

  // Failure path: report the most specific cause we have.
  if (error) {
    console.error(error);
  } else if (signalCode) {
    console.error(`Command killed: ${signalCode}`);
  } else {
    console.error(`Command exited: code ${exitCode}`);
  }

  // exitCode is null when killed by a signal; exit 1 in that case.
  process.exit(exitCode ?? 1);
}
|
||||
|
||||
// Script entry point: run the build with all CLI arguments after the script name.
build(process.argv.slice(2));
|
||||
2
scripts/build.ps1
Executable file
2
scripts/build.ps1
Executable file
@@ -0,0 +1,2 @@
|
||||
# Builds bun using the already-configured Ninja build in .\build.
# Assumes the current directory is the repository root.

# Load the shared build environment (compiler settings, $env:CPUS, ...).
.\scripts\env.ps1
ninja -Cbuild
# ninja is a native command: its failure does not stop a PowerShell script by
# itself, and `powershell -File` would still exit 0 — surface it explicitly.
if ($LASTEXITCODE -ne 0) { throw "ninja failed with exit code $LASTEXITCODE" }
|
||||
50
scripts/buildkite-link-bun.ps1
Executable file
50
scripts/buildkite-link-bun.ps1
Executable file
@@ -0,0 +1,50 @@
|
||||
# Buildkite CI step: produce the final Windows bun.exe by linking artifacts
# built in earlier steps (zig object, C++ object archive, dependency libs),
# then package release and profile zips and dump crash-handler feature data.
param(
  [switch]$Baseline = $false
)

$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash

# Target/tag names select which upstream build steps' artifacts to download.
$Target = If ($Baseline) { "windows-x64-baseline" } Else { "windows-x64" }
$Tag = "bun-$Target"

# Shared build environment: compilers, $CMAKE_FLAGS array, $env:CPUS.
. (Join-Path $PSScriptRoot "env.ps1")

mkdir -Force build
# Pull artifacts from the three sibling build steps into .\build.
buildkite-agent artifact download "**" build --step "${Target}-build-zig"
buildkite-agent artifact download "**" build --step "${Target}-build-cpp"
buildkite-agent artifact download "**" build --step "${Target}-build-deps"
# Some artifacts arrive nested one level deep (build\build\...); flatten them.
mv -Force -ErrorAction SilentlyContinue build\build\bun-deps\* build\bun-deps
mv -Force -ErrorAction SilentlyContinue build\build\* build

Set-Location build

# HACK: See scripts/build-bun-cpp.ps1
# The C++ archive was uploaded as split part-files; stitch it back together.
Join-File -Path "$(Resolve-Path .)\bun-cpp-objects.a" -Verbose -DeletePartFiles

# Configure a link-only build: skip codegen and reuse the prebuilt objects.
cmake .. @CMAKE_FLAGS `
  -G Ninja `
  -DCMAKE_BUILD_TYPE=Release `
  -DNO_CODEGEN=1 `
  -DNO_CONFIGURE_DEPENDS=1 `
  -DBUN_LINK_ONLY=1 `
  "-DBUN_DEPS_OUT_DIR=$(Resolve-Path bun-deps)" `
  "-DBUN_CPP_ARCHIVE=$(Resolve-Path bun-cpp-objects.a)" `
  "-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)"
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }

ninja -v -j $env:CPUS
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }

Set-Location ..
# Package the release artifact (exe only)...
$Dist = mkdir -Force "${Tag}"
cp -r build\bun.exe "$Dist\bun.exe"
Compress-Archive -Force "$Dist" "${Dist}.zip"
# ...and the profile artifact (exe + pdb for symbolicated stack traces).
$Dist = "$Dist-profile"
MkDir -Force "$Dist"
cp -r build\bun.exe "$Dist\bun.exe"
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive -Force "$Dist" "$Dist.zip"

# Smoke-test the binary and capture crash-handler feature flags as JSON.
$env:BUN_GARBAGE_COLLECTOR_LEVEL = "1"
$env:BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING = "1"
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json
|
||||
42
scripts/bump.ts
Normal file
42
scripts/bump.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
// Bumps the Bun version recorded in CMakeLists.txt (major/minor/patch) and
// writes the previous version to the LATEST file. Must be run from `main`.
import path from "path";

// Operate from the repository root regardless of invocation directory.
process.chdir(path.join(import.meta.dir, "../"));

const git_branch = await Bun.$`git rev-parse --abbrev-ref HEAD`.text();

if (git_branch.trim() !== "main") {
  console.error("You must be on the main branch to run this script");
  process.exit(1);
}

// The index into `kinds` doubles as the position of the component to bump.
const kinds = ["major", "minor", "patch"];
const increment = kinds.findIndex(type => process.argv[2] === type);
if (increment === -1) {
  console.error("Usage: bun bump <major|minor|patch>");
  process.exit(1);
}

const cmakelists = await Bun.file("./CMakeLists.txt").text();

const found_version_line = cmakelists.indexOf("set(Bun_VERSION");
if (found_version_line === -1) {
  throw new Error("Could not find version line in CMakeLists.txt");
}

// Capture group 1 is the current "X.Y.Z" version string.
const version = /set\(Bun_VERSION "([0-9]+\.[0-9]+\.[0-9]+)"/.exec(cmakelists);
if (!version) {
  throw new Error("Could not find version in CMakeLists.txt");
}

// --last-version=X.Y.Z overrides the base version being bumped.
const to_arg = process.argv.find(arg => arg.startsWith("--last-version="));
const to = to_arg ? to_arg.slice("--last-version=".length) : version[1];

// Bump the selected component, keep components before it, zero those after.
const updated_version = to
  .split(".")
  .map((v, i) => (i === increment ? parseInt(v) + 1 : i < increment ? parseInt(v) : 0))
  .join(".");

// NOTE(review): replaces the first occurrence of the version string anywhere
// in the file — assumes it only appears on the set(Bun_VERSION ...) line.
await Bun.write("./CMakeLists.txt", cmakelists.replace(version[1], updated_version));
await Bun.write("LATEST", to);

console.log("Bumping version from %s to %s", version[1], updated_version);
|
||||
35
scripts/calculate-canary-revision.sh
Executable file
35
scripts/calculate-canary-revision.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env bash
# Computes the canary revision: how many commits the current HEAD is ahead of
# the latest GitHub release tag, queried via the GitHub GraphQL API.
# Requires GITHUB_TOKEN. Pass --raw to print just the number.
if [ -z "$GITHUB_TOKEN" ]; then
  echo "GITHUB_TOKEN is not set"
  exit 1
fi

# Fetch the most recent release tag name from oven-sh/bun.
query="\"query{repository(owner:\\\"oven-sh\\\",name:\\\"bun\\\"){releases(first:1){edges{node{tagName}}}}}\""
tagName=$(curl -fsSL "https://api.github.com/graphql" -X POST -d '{"query":'${query}'}' \
  -H "Authorization: bearer ${GITHUB_TOKEN}" -H "Content-Type: application/json" \
  | jq -r '.data.repository.releases.edges[0].node.tagName')

# NOTE(review): $headRef is only ever read here — presumably supplied by the
# CI environment; falls back to the local HEAD commit. Verify against callers.
if [ -z "$headRef" ]; then
  headRef=$(git rev-parse HEAD)
fi

# Ask GitHub how far headRef is ahead of the release tag.
query="\"query{repository(owner:\\\"oven-sh\\\",name:\\\"bun\\\"){ref(qualifiedName:\\\"${tagName}\\\"){compare(headRef:\\\"${headRef}\\\"){aheadBy}}}}\""
aheadBy=$(curl -fsSL "https://api.github.com/graphql" -X POST -d '{"query":'${query}'}' \
  -H "Authorization: bearer ${GITHUB_TOKEN}" -H "Content-Type: application/json" \
  | jq -r '.data.repository.ref.compare.aheadBy')

# "null" means GitHub does not know this commit (e.g. unpushed local work);
# --raw then defaults to 1 so canary numbering still works.
if [ "$1" == '--raw' ]; then
  if [ "$aheadBy" == "null" ]; then
    echo "1"
  else
    echo "${aheadBy}"
  fi
else
  echo "Latest version is ${tagName}"
  if [ "$aheadBy" == "null" ]; then
    echo "Current commit is not available on GitHub.com"
  else
    echo "Ahead by ${aheadBy} commits."
  fi
  echo "(call script with --raw to print just a number)"
fi
|
||||
47
scripts/clean-dependencies.ps1
Executable file
47
scripts/clean-dependencies.ps1
Executable file
@@ -0,0 +1,47 @@
|
||||
# Resets every vendored dependency submodule to a pristine checkout and
# deletes the compiled .lib outputs so the next build starts from scratch.
. (Join-Path $PSScriptRoot "env.ps1")
# Missing files/dirs are expected during cleanup; don't abort on them.
$ErrorActionPreference = 'SilentlyContinue' # Setting strict mode, similar to 'set -euo pipefail' in bash

# Discards all local changes and untracked files in a submodule checkout.
# `Run` (from env.ps1) throws on non-zero exit codes.
function Reset-Submodule {
  param (
    $Repository
  )
  Push-Location $Repository
  try {
    Run git reset --hard
    Run git clean -fdx
  }
  finally {
    Pop-Location
  }
}

$Deps = Join-Path $PSScriptRoot "../src/deps"
# NOTE(review): identical to $Deps, yet env.ps1 places built libs under
# build\bun-deps — confirm whether this should point there instead.
$DepsOut = Join-Path $PSScriptRoot "../src/deps"

Reset-Submodule $Deps\base64
Reset-Submodule $Deps\boringssl
Reset-Submodule $Deps\c-ares
Reset-Submodule $Deps\libarchive
Reset-Submodule $Deps\lol-html
Reset-Submodule $Deps\mimalloc
Reset-Submodule $Deps\picohttpparser
Reset-Submodule $Deps\tinycc
Reset-Submodule $Deps\zlib
Reset-Submodule $Deps\zstd
Reset-Submodule $Deps\ls-hpack

# Remove the compiled outputs; absent files are ignored (SilentlyContinue).
Remove-Item -Force $DepsOut\base64.lib
Remove-Item -Force $DepsOut\crypto.lib
Remove-Item -Force $DepsOut\ssl.lib
Remove-Item -Force $DepsOut\decrepit.lib
Remove-Item -Force $DepsOut\cares.lib
Remove-Item -Force $DepsOut\archive.lib
Remove-Item -Force $DepsOut\lolhtml.lib
Remove-Item -Force $DepsOut\mimalloc.lib
Remove-Item -Force $DepsOut\tcc.lib
Remove-Item -Force $DepsOut\zlib.lib
Remove-Item -Force $DepsOut\zstd.lib
Remove-Item -Force $DepsOut\libuv.lib
Remove-Item -Force $DepsOut\lshpack.lib

# Restore strict mode for any script that dot-sourced this one.
$ErrorActionPreference = 'Stop'
|
||||
28
scripts/clean-dependencies.sh
Executable file
28
scripts/clean-dependencies.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# Resets every vendored dependency checkout to a pristine state and removes
# the compiled .a outputs so the next build starts from scratch.
# Deliberately best-effort: no `set -e`, since some submodules may be absent.

# Kept for callers that may export/inspect it; currently unused here.
FORCE=

SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd "$SCRIPT_DIR" && cd .. && pwd)}
BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/src/deps/}
BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps/}

# Delete compiled dependency archives (quote the dir, let the glob expand).
rm -f "$BUN_DEPS_OUT_DIR"/*.a

# git_reset <dir>: discard all local changes and untracked files in <dir>.
# Expansions are quoted so paths with spaces work; a failed cd skips the repo
# instead of running git in the wrong directory.
git_reset() {
  local dir
  dir=$(pwd)
  cd "$1" || return
  git reset --hard
  git clean -fdx
  cd "$dir" || return
}

git_reset "$BUN_DEPS_DIR/boringssl"
git_reset "$BUN_DEPS_DIR/c-ares"
git_reset "$BUN_DEPS_DIR/libarchive"
git_reset "$BUN_DEPS_DIR/lol-html"
git_reset "$BUN_DEPS_DIR/mimalloc"
git_reset "$BUN_DEPS_DIR/picohttpparser"
git_reset "$BUN_DEPS_DIR/tinycc"
git_reset "$BUN_DEPS_DIR/zlib"
git_reset "$BUN_DEPS_DIR/zstd"
git_reset "$BUN_DEPS_DIR/ls-hpack"
|
||||
73
scripts/cross-compile-codegen.sh
Executable file
73
scripts/cross-compile-codegen.sh
Executable file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env bash
# Runs all code-generation steps needed to cross-compile bun for another
# platform, writing results to build-codegen-<platform>-<arch> (or $3).
set -e

export TARGET_PLATFORM=${1:-win32}
export TARGET_ARCH=${2:-x64}

# Ensure `bun` is resolvable even when the default install dir isn't on PATH.
if ! which bun; then
  export PATH="$PATH:$HOME/.bun/bin"
fi

# Work from the repository root.
cd "$(dirname "${BASH_SOURCE[0]}")/../"

OUT=build-codegen-${TARGET_PLATFORM}-${TARGET_ARCH}

# Optional third argument overrides the output directory.
if [ -n "$3" ]; then
  OUT="$3"
fi

rm -rf "$OUT/codegen"
rm -rf "$OUT/js"
mkdir -p "$OUT"
mkdir -p "$OUT/"{codegen,js,tmp_functions,tmp_modules}

OUT=$(realpath "$OUT")

# task <cmd...>: echo and run a command, retrying once because some codegen
# scripts are flaky; exit 1 if the retry also fails.
# BUGFIX: the command must run inside `if !` — under `set -e`, a bare failing
# command would abort the whole script before the retry logic was reached.
task() {
  echo '$ '"$@"
  if ! "$@"; then
    # some scripts are flaky, run them again
    echo "!!! retrying"
    if ! "$@"; then
      echo "!!! failed"
      exit 1
    fi
  fi
}

task bun ./src/codegen/bundle-modules.ts --debug=OFF "$OUT"

rm -rf "$OUT/tmp_functions"
rm -rf "$OUT/tmp_modules"

# Globs expand at assignment time; quote the array expansions afterwards.
CLASSES=(
  ./src/bun.js/*.classes.ts
  ./src/bun.js/api/*.classes.ts
  ./src/bun.js/test/*.classes.ts
  ./src/bun.js/webcore/*.classes.ts
  ./src/bun.js/node/*.classes.ts
)
task bun "./src/codegen/generate-classes.ts" "${CLASSES[@]}" "$OUT/codegen"

LUTS=(
  ./src/bun.js/bindings/BunObject.cpp
  ./src/bun.js/bindings/ZigGlobalObject.lut.txt
  ./src/bun.js/bindings/JSBuffer.cpp
  ./src/bun.js/bindings/BunProcess.cpp
  ./src/bun.js/bindings/ProcessBindingConstants.cpp
  ./src/bun.js/bindings/ProcessBindingNatives.cpp
)
for lut in "${LUTS[@]}"; do
  # Map Foo.cpp / Foo.lut.txt to the generated header name Foo.lut.h.
  result=$(basename "$lut" | sed 's/.lut.txt/.cpp/' | sed 's/.cpp/.lut.h/')
  task bun "./src/codegen/create-hash-table.ts" "$lut" "$OUT/codegen/$result"
done

task bun "./src/codegen/generate-jssink.ts" "$OUT/codegen"

wait

rm -rf "$OUT/tmp"*

echo "-> $(basename "$OUT")"
|
||||
42
scripts/download-webkit.ps1
Executable file
42
scripts/download-webkit.ps1
Executable file
@@ -0,0 +1,42 @@
|
||||
# Downloads a prebuilt WebKit package from oven-sh/WebKit's autobuild releases
# and extracts it into $OutDir. A ".tag" marker file makes repeat runs no-ops.
param (
  [Parameter(Mandatory)]
  [string]$OutDir,
  [Parameter(Mandatory)][string]$Tag,
  [Parameter(Mandatory)][string]$PackageName
)

$ErrorActionPreference = "Stop"

$Url = "https://github.com/oven-sh/WebKit/releases/download/autobuild-$Tag/$PackageName.tar.gz"
# Tarballs are cached in .cache so a fresh OutDir can skip the download.
$CacheDir = (mkdir -Force (Join-Path $PSScriptRoot "../.cache"))
$TarPath = Join-Path $CacheDir "$PackageName-$Tag.tar.gz"

# Already extracted at the requested tag? Nothing to do.
if (Test-Path $OutDir\.tag) {
  $CurrentTag = Get-Content -Path (Join-Path $OutDir ".tag")
  if ($CurrentTag -eq $Tag) {
    return
  }
}

# Stale or missing output: start from an empty directory.
Remove-Item $OutDir -ErrorAction SilentlyContinue -Recurse
$null = mkdir -Force $OutDir
try {
  Write-Host "-- Downloading WebKit"
  if (!(Test-Path $TarPath)) {
    try {
      Invoke-WebRequest $Url -OutFile $TarPath -MaximumRetryCount 3 -RetryIntervalSec 1
    } catch {
      Write-Error "Failed to fetch WebKit from: $Url"
      throw $_
    }
  }

  # Extract into OutDir's parent; tar needs forward slashes for -C.
  Push-Location $CacheDir
  tar.exe "-xzf" "$PackageName-$Tag.tar.gz" -C (Resolve-Path -Relative $OutDir\..\).replace('\', '/')
  Pop-Location

  # Record the tag so the next invocation can short-circuit.
  Set-Content -Path (Join-Path $OutDir ".tag") -Value "$Tag"
} catch {
  # Don't leave a half-extracted directory behind.
  Remove-Item -Force -ErrorAction SilentlyContinue $OutDir
  throw $_
}
|
||||
85
scripts/download-webkit.sh
Executable file
85
scripts/download-webkit.sh
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env bash
# Usage: download-webkit.sh <outdir> <tag> <package>
# Downloads a prebuilt WebKit tarball from oven-sh/WebKit's autobuild releases
# and extracts it into <outdir>. A ".tag" marker makes repeat runs no-ops.
set -e

OUTDIR="$1"
TAG="$2"
PKG="$3"

if [ -z "$OUTDIR" ]; then
  echo "Missing outdir"
  exit 1
fi
if [ -z "$TAG" ]; then
  echo "Missing tag"
  exit 1
fi
if [ -z "$PKG" ]; then
  echo "Missing package"
  exit 1
fi

url="https://github.com/oven-sh/WebKit/releases/download/autobuild-$TAG/$PKG.tar.gz"

old_tar_dir="$(dirname "$0")/../.webkit-cache"
tar_dir="$(dirname "$0")/../.cache"
if [ -d "$old_tar_dir" ]; then
  # migration step from the old system
  mkdir "$tar_dir"
  mv "$old_tar_dir"/* "$tar_dir"
  rm -r "$old_tar_dir"
fi

tar="$tar_dir/$PKG-$TAG.tar.gz"

mkdir -p "$OUTDIR"
mkdir -p "$tar_dir"

# Already extracted for this tag+package? Nothing to do.
if [ -f "$OUTDIR/.tag" ]; then
  read_tag="$(cat "$OUTDIR/.tag")"
  if [ "$read_tag" == "$TAG-$PKG" ]; then
    exit 0
  fi
fi

rm -rf "$OUTDIR"

# download <command-string> <retries>: run the (word-split) command string,
# retrying up to <retries> more times on failure. Temporarily disables
# `set -e` (if active) so a failing attempt doesn't kill the script, then
# restores it afterwards. Returns the final attempt's exit code.
download () {
  local command="$1"
  local retries="$2"
  local options="$-"
  if [[ $options == *e* ]]; then
    set +e
  fi
  $command
  local exit_code=$?
  if [[ $options == *e* ]]; then
    set -e
  fi
  if [[ $exit_code -ne 0 && $retries -gt 0 ]]; then
    download "$command" $(($retries - 1))
  else
    return $exit_code
  fi
}

# this is a big download so we will retry 5 times and ask curl to resume
# download from where failure occurred if it fails and is rerun
if [ ! -f "$tar" ]; then
  echo "-- Downloading WebKit"
  if ! download "curl -C - --http1.1 -o $tar.tmp -L $url" 5; then
    echo "Failed to download $url"
    exit 1
  else
    # Only move into place on success so partial files never look complete.
    mv $tar.tmp $tar
  fi
fi

# A corrupt tarball is deleted so the next run re-downloads it.
tar -xzf "$tar" -C "$(dirname "$OUTDIR")" || (rm "$tar" && exit 1)

# We want to make sure we use the system-version of icucore on macOS
if [ "$(uname)" == "Darwin" ]; then
  # delete the unicode folder from include
  rm -rf "$OUTDIR/include/unicode"
fi

# Record tag+package so the next invocation can short-circuit.
echo "$TAG-$PKG" >"$OUTDIR/.tag"
|
||||
42
scripts/download-zig.ps1
Executable file
42
scripts/download-zig.ps1
Executable file
@@ -0,0 +1,42 @@
|
||||
# Downloads the pinned Zig toolchain (0.13.0, Windows x86_64) into .cache/zig.
# A ".tag" marker file makes repeat runs no-ops.
$ErrorActionPreference = "Stop"

$ZigVersion="0.13.0"
$Target="windows"
$Arch="x86_64"

$Url = "https://ziglang.org/builds/zig-${Target}-${Arch}-${ZigVersion}.zip"
$CacheDir = (mkdir -Force (Join-Path $PSScriptRoot "../.cache"))
$TarPath = Join-Path $CacheDir "zig-${ZigVersion}.zip"
$OutDir = Join-Path $CacheDir "zig"

# Already extracted at this version? Nothing to do.
if (Test-Path $OutDir\.tag) {
  $CurrentTag = Get-Content -Path (Join-Path $OutDir ".tag")
  if ($CurrentTag -eq $ZigVersion) {
    return
  }
}

Remove-Item $OutDir -ErrorAction SilentlyContinue -Recurse
$null = mkdir -Force $OutDir
Push-Location $CacheDir
try {
  # The zip is cached, so only the first run pays for the download.
  if (!(Test-Path $TarPath)) {
    try {
      Write-Host "-- Downloading Zig"
      Invoke-RestMethod $Url -OutFile $TarPath -MaximumRetryCount 3 -RetryIntervalSec 1
    } catch {
      Write-Error "Failed to fetch Zig from: $Url"
      throw $_
    }
  }

  # Extract next to $OutDir, then rename the versioned folder to plain "zig".
  Remove-Item "$OutDir" -Recurse
  Expand-Archive "$TarPath" "$OutDir\..\"
  Move-Item "zig-$Target-$Arch-$ZigVersion" "zig"
  # Record the version so the next invocation can short-circuit.
  Set-Content -Path (Join-Path $OutDir ".tag") -Value "$ZigVersion"
} catch {
  # Don't leave a half-extracted toolchain behind.
  Remove-Item -Force -ErrorAction SilentlyContinue $OutDir
  throw $_
} finally {
  Pop-Location
}
|
||||
87
scripts/download-zig.sh
Executable file
87
scripts/download-zig.sh
Executable file
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env bash
# Downloads the Zig toolchain into .cache/zig.
# With no argument, uses the version pinned in build.zig; with an explicit
# version (or "master"), downloads that version AND rewrites the pin across
# the repo so the change can be committed.
set -e
# Work from the repository root (parent of this script's directory).
cd $(dirname $(dirname "${BASH_SOURCE[0]}"))

zig_version=""
if [ -n "$1" ]; then
  zig_version="$1"
  update_repo=true

  # "master" resolves to the current nightly version from ziglang.org.
  if [ "$zig_version" == "master" ]; then
    zig_version=$(curl -fsSL https://ziglang.org/download/index.json | jq -r .master.version)
  fi
else
  zig_version=$(grep 'recommended_zig_version = "' "build.zig" | cut -d'"' -f2)
fi

# Map `uname -ms` output to ziglang.org's target/arch naming.
case $(uname -ms) in
'Darwin x86_64')
  target='macos'
  arch='x86_64'
  ;;
'Darwin arm64')
  target='macos'
  arch='aarch64'
  ;;
'Linux aarch64' | 'Linux arm64')
  target='linux'
  arch='aarch64'
  ;;
'Linux x86_64')
  target='linux'
  arch='x86_64'
  ;;
*)
  # NOTE(review): ${unamestr} is never assigned — this prints an empty
  # platform name in the error message.
  printf "error: cannot get platform name from '%s'\n" "${unamestr}"
  exit 1
  ;;
esac

url="https://ziglang.org/builds/zig-${target}-${arch}-${zig_version}.tar.xz"
dest="$(pwd)/.cache/zig-${zig_version}.tar.xz"
extract_at="$(pwd)/.cache/zig"

mkdir -p ".cache"

# When invoked with an explicit version, rewrite every place the repo pins
# the Zig version; always refresh the zig.exe symlink workaround.
update_repo_if_needed() {
  if [ "$update_repo" == "true" ]; then
    files=(
      build.zig
      Dockerfile
      scripts/download-zig.ps1
      .github/workflows/*
    )

    zig_version_previous=$(grep 'recommended_zig_version = "' "build.zig" | cut -d'"' -f2)

    for file in ${files[@]}; do
      sed -i 's/'"${zig_version_previous}"'/'"${zig_version}"'/g' "$file"
    done

    printf "Zig was updated to ${zig_version}. Please commit new files."
  fi
  # symlink extracted zig to extracted zig.exe
  # TODO: Workaround for https://github.com/ziglang/vscode-zig/issues/164
  ln -sf "${extract_at}/zig" "${extract_at}/zig.exe"
  chmod +x "${extract_at}/zig.exe"
}

# Already extracted from this exact URL? Just refresh pins/symlink and stop.
if [ -e "${extract_at}/.version" ]; then
  if [ "$(cat "${extract_at}/.version")" == "${url}" ]; then
    update_repo_if_needed
    exit 0
  fi
fi

# The tarball is cached, so only the first run pays for the download.
if ! [ -e "${dest}" ]; then
  printf -- "-- Downloading Zig v%s\n" "${zig_version}"
  curl -o "$dest" -L "$url"
fi

rm -rf "${extract_at}"
mkdir -p "${extract_at}"
tar -xf "${dest}" -C "${extract_at}" --strip-components=1

# Record the source URL so the next invocation can short-circuit.
echo "${url}" > "${extract_at}/.version"

update_repo_if_needed
|
||||
145
scripts/env.ps1
Executable file
145
scripts/env.ps1
Executable file
@@ -0,0 +1,145 @@
|
||||
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash

# this is the environment script for building bun's dependencies
# it sets c compiler and flags
$ScriptDir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent

# Ensure an MSVC developer environment is loaded (skipped when one already is).
if ($env:VSINSTALLDIR -eq $null) {
  Write-Host "Loading Visual Studio environment, this may take a second..."
  $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
  if (!(Test-Path $vswhere)) {
    throw "Visual Studio installer directory not found."
  }
  $vsDir = (& $vswhere -prerelease -latest -property installationPath)
  if ($vsDir -eq $null) {
    # Fall back to scanning the default VS2022 install location.
    $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory
    if ($vsDir -eq $null) {
      throw "Visual Studio directory not found."
    }
    $vsDir = $vsDir.FullName;
  }
  Push-Location $vsDir
  try {
    $launchps = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
    . $launchps -Arch amd64 -HostArch amd64
  } finally { Pop-Location }
}

if($Env:VSCMD_ARG_TGT_ARCH -eq "x86") {
  # Please do not try to compile Bun for 32 bit. It will not work. I promise.
  throw "Visual Studio environment is targetting 32 bit. This configuration is definetly a mistake."
}

# Repository layout; each may be overridden via environment variables.
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { Join-Path $ScriptDir '..' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'build\bun-deps' }

$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-CimInstance -Class Win32_Processor).NumberOfCores }
$Lto = if ($env:USE_LTO) { $env:USE_LTO -eq "1" } else { $False }
# Baseline builds target older CPUs; CI step keys containing "baseline" imply it.
$Baseline = if ($env:USE_BASELINE_BUILD) {
  $env:USE_BASELINE_BUILD -eq "1"
} elseif ($env:BUILDKITE_STEP_KEY -match "baseline") {
  $True
} else {
  $False
}

$CC = "clang-cl"
$CXX = "clang-cl"

# Release flags: static CRT (/MT), codeview debug info (/Z7), no DLL CRT.
# NOTE(review): /O2 appears twice in both flag strings — harmless, but
# presumably one occurrence was intended to be something else; confirm.
$CFLAGS = '/O2 /Z7 /MT /O2 /Ob2 /DNDEBUG /U_DLL'
$CXXFLAGS = '/O2 /Z7 /MT /O2 /Ob2 /DNDEBUG /U_DLL -Xclang -fno-c++-static-destructors'

# libarchive requires zlib headers for gzip compression support. without them, it will attempt to spawn a gzip process
$CFLAGS += " /I$BUN_DEPS_DIR\zlib"

if ($Lto) {
  $CXXFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc"
  $CFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc"
}

# Baseline targets nehalem (pre-AVX); default targets haswell.
$CPU_NAME = if ($Baseline) { "nehalem" } else { "haswell" };
$env:CPU_TARGET = $CPU_NAME

$CFLAGS += " -march=${CPU_NAME}"
$CXXFLAGS += " -march=${CPU_NAME}"

# Canary revision: explicit env var, Buildkite metadata, or "1" by default.
$Canary = If ($env:CANARY) {
  $env:CANARY
} ElseIf ($env:BUILDKITE -eq "true") {
  (buildkite-agent meta-data get canary)
} Else {
  "1"
}

# Base CMake invocation shared by all dependency build scripts.
$CMAKE_FLAGS = @(
  "-GNinja",
  "-DCMAKE_BUILD_TYPE=Release",
  "-DCMAKE_C_COMPILER=$CC",
  "-DCMAKE_CXX_COMPILER=$CXX",
  "-DCMAKE_C_FLAGS=$CFLAGS",
  "-DCMAKE_CXX_FLAGS=$CXXFLAGS",
  "-DCMAKE_C_FLAGS_RELEASE=$CFLAGS",
  "-DCMAKE_CXX_FLAGS_RELEASE=$CXXFLAGS",
  "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
  "-DCANARY=$Canary"
)

# Prefer llvm-lib as the archiver when available.
if (Get-Command llvm-lib -ErrorAction SilentlyContinue) {
  $AR_CMD = Get-Command llvm-lib -ErrorAction SilentlyContinue
  $AR = $AR_CMD.Path
  $env:AR = $AR
  $CMAKE_FLAGS += "-DCMAKE_AR=$AR"
}

# Export compiler settings for tools that read them from the environment.
$env:CC = "clang-cl"
$env:CXX = "clang-cl"
$env:CFLAGS = $CFLAGS
$env:CXXFLAGS = $CXXFLAGS
$env:CPUS = $CPUS

if ($Baseline) {
  $CMAKE_FLAGS += "-DUSE_BASELINE_BUILD=ON"
}

if ($Lto) {
  $CMAKE_FLAGS += "-DUSE_LTO=ON"
}

# Compiler cache: prefer ccache, fall back to sccache.
if (Get-Command ccache -ErrorAction SilentlyContinue) {
  $CMAKE_FLAGS += "-DCMAKE_C_COMPILER_LAUNCHER=ccache"
  $CMAKE_FLAGS += "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
} elseif (Get-Command sccache -ErrorAction SilentlyContinue) {
  # Continue with local compiler if sccache has an error
  $env:SCCACHE_IGNORE_SERVER_IO_ERROR = "1"

  $CMAKE_FLAGS += "-DCMAKE_C_COMPILER_LAUNCHER=sccache"
  $CMAKE_FLAGS += "-DCMAKE_CXX_COMPILER_LAUNCHER=sccache"
  # Embedded debug info is required for sccache to cache MSVC-style builds.
  $CMAKE_FLAGS += "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=Embedded"
  $CMAKE_FLAGS += "-DCMAKE_POLICY_CMP0141=NEW"
}

$null = New-Item -ItemType Directory -Force -Path $BUN_DEPS_OUT_DIR

function Run() {
  # A handy way to run a command, and automatically throw an error if the
  # exit code is non-zero.

  if ($args.Count -eq 0) {
    throw "Must supply some arguments."
  }

  $command = $args[0]
  $commandArgs = @()
  if ($args.Count -gt 1) {
    $commandArgs = @($args[1..($args.Count - 1)] | % {$_})
  }

  write-host "> $command $commandArgs"
  & $command $commandArgs
  $result = $LASTEXITCODE

  if ($result -ne 0) {
    throw "$command $commandArgs exited with code $result."
  }
}
|
||||
154
scripts/env.sh
Executable file
154
scripts/env.sh
Executable file
@@ -0,0 +1,154 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
export CI=${CI:-0}
|
||||
export USE_LTO=${USE_LTO:-0}
|
||||
export FORCE_PIC=${FORCE_PIC:-}
|
||||
UNAME_OS="$(uname -s)"
|
||||
UNAME_ARCH="$(uname -m)"
|
||||
|
||||
export CMAKE_FLAGS="${CMAKE_FLAGS:-}"
|
||||
|
||||
# Hack for buildkite sometimes not having the right path
|
||||
if [[ "${CI:-}" == "1" || "${CI:-}" == "true" ]]; then
|
||||
if [ -f ~/.bashrc ]; then
|
||||
source ~/.bashrc
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $UNAME_OS == 'Darwin' ]]; then
|
||||
export LLVM_VERSION=18
|
||||
else
|
||||
export LLVM_VERSION=16
|
||||
fi
|
||||
|
||||
# this is the environment script for building bun's dependencies
|
||||
# it sets c compiler and flags
|
||||
export SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
|
||||
export BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd "$SCRIPT_DIR" && cd .. && pwd)}
|
||||
export BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps}
|
||||
export BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/build/bun-deps}
|
||||
|
||||
# Silence a perl script warning
|
||||
export LC_CTYPE="en_US.UTF-8"
|
||||
export LC_ALL="en_US.UTF-8"
|
||||
|
||||
if [[ $UNAME_OS == 'Darwin' ]]; then
|
||||
export CXX="$(brew --prefix llvm)@$LLVM_VERSION/bin/clang++"
|
||||
export CC="$(brew --prefix llvm)@$LLVM_VERSION/bin/clang"
|
||||
export AR="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-ar"
|
||||
export RANLIB="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-ranlib"
|
||||
export LIBTOOL="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-libtool-darwin"
|
||||
export PATH="$(brew --prefix llvm)@$LLVM_VERSION/bin:$PATH"
|
||||
ln -sf $LIBTOOL "$(brew --prefix llvm)@$LLVM_VERSION/bin/libtool" || true
|
||||
elif [[ "$CI" != "1" && "$CI" != "true" ]]; then
|
||||
if [[ -f $SCRIPT_DIR/env.local ]]; then
|
||||
echo "Sourcing $SCRIPT_DIR/env.local"
|
||||
source $SCRIPT_DIR/env.local
|
||||
fi
|
||||
fi
|
||||
|
||||
# this compiler detection could be better
|
||||
export CC=${CC:-$(which clang-$LLVM_VERSION || which clang || which cc)}
|
||||
export CXX=${CXX:-$(which clang++-$LLVM_VERSION || which clang++ || which c++)}
|
||||
export AR=${AR:-$(which llvm-ar || which ar)}
|
||||
export CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
|
||||
export RANLIB=${RANLIB:-$(which llvm-ranlib-$LLVM_VERSION || which llvm-ranlib || which ranlib)}
|
||||
|
||||
# on Linux, force using lld as the linker
|
||||
if [[ $UNAME_OS == 'Linux' ]]; then
|
||||
export LD=${LD:-$(which ld.lld-$LLVM_VERSION || which ld.lld || which ld)}
|
||||
export LDFLAGS="${LDFLAGS:-} -fuse-ld=lld "
|
||||
fi
|
||||
|
||||
export CMAKE_CXX_COMPILER=${CXX}
|
||||
export CMAKE_C_COMPILER=${CC}
|
||||
|
||||
export FILE_PREFIX_MAP=" -ffile-prefix-map='${BUN_BASE_DIR}'=. -ffile-prefix-map='${BUN_DEPS_DIR}'=src/deps -ffile-prefix-map='${BUN_DEPS_OUT_DIR}'=src/deps "
|
||||
|
||||
# Shared compiler/linker flag setup for building Bun's vendored native dependencies.
# Exports CFLAGS/CXXFLAGS/LDFLAGS and assembles CMAKE_FLAGS for the per-dependency
# build scripts that source this file.
export CFLAGS="-O3 -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables $FILE_PREFIX_MAP "
export CXXFLAGS="-O3 -fno-exceptions -fno-rtti -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-c++-static-destructors $FILE_PREFIX_MAP "

# libarchive requires zlib headers for gzip compression support. Without them,
# it will attempt to spawn a gzip process instead.
export CFLAGS="$CFLAGS -I$BUN_DEPS_DIR/zlib "

# Add flags for LTO.
# We cannot enable LTO on macOS for dependencies because it requires -fuse-ld=lld
# and lld causes many segfaults on macOS (likely related to stack size).
if [[ "$USE_LTO" == "1" || "$USE_LTO" == "ON" ]]; then
  export CFLAGS="$CFLAGS -flto=full "
  export CXXFLAGS="$CXXFLAGS -flto=full -fwhole-program-vtables -fforce-emit-vtables "
  export LDFLAGS="${LDFLAGS:-} -flto=full -fwhole-program-vtables -fforce-emit-vtables "
fi

if [[ $UNAME_OS == 'Linux' ]]; then
  export CFLAGS="$CFLAGS -ffunction-sections -fdata-sections -faddrsig "
  export CXXFLAGS="$CXXFLAGS -ffunction-sections -fdata-sections -faddrsig "
  export LDFLAGS="${LDFLAGS:-} -Wl,-z,norelro"
fi

# Clang 18 on macOS needs -fno-define-target-os-macros to fix a zlib build issue
# https://gitlab.kitware.com/cmake/cmake/-/issues/25755
if [[ $UNAME_OS == 'Darwin' && $LLVM_VERSION == '18' ]]; then
  export CFLAGS="$CFLAGS -fno-define-target-os-macros "
  export CXXFLAGS="$CXXFLAGS -fno-define-target-os-macros -D_LIBCXX_ENABLE_ASSERTIONS=0 -D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE "
fi

# libarchive needs position-independent executables to compile successfully
if [ -n "${FORCE_PIC:-}" ]; then
  export CFLAGS="$CFLAGS -fPIC "
  export CXXFLAGS="$CXXFLAGS -fPIC "
elif [[ $UNAME_OS == 'Linux' ]]; then
  export CFLAGS="$CFLAGS -fno-pie -fno-pic "
  export CXXFLAGS="$CXXFLAGS -fno-pie -fno-pic "
fi

if [[ $UNAME_OS == 'Linux' && ($UNAME_ARCH == 'aarch64' || $UNAME_ARCH == 'arm64') ]]; then
  export CFLAGS="$CFLAGS -march=armv8-a+crc -mtune=ampere1 "
  export CXXFLAGS="$CXXFLAGS -march=armv8-a+crc -mtune=ampere1 "
fi

# NOTE: bash cannot export arrays; the previous `export` here was a silent no-op.
# The array is consumed by the scripts that source this file, so a plain
# assignment is sufficient.
CMAKE_FLAGS=(
  -DCMAKE_C_COMPILER="${CC}"
  -DCMAKE_CXX_COMPILER="${CXX}"
  -DCMAKE_C_FLAGS="$CFLAGS"
  -DCMAKE_CXX_FLAGS="$CXXFLAGS"
  -DCMAKE_BUILD_TYPE=Release
  -DCMAKE_CXX_STANDARD=20
  -DCMAKE_C_STANDARD=17
  -DCMAKE_CXX_STANDARD_REQUIRED=ON
  -DCMAKE_C_STANDARD_REQUIRED=ON
)

# Prefer ccache, then sccache; empty when neither is installed.
CCACHE=$(command -v ccache || command -v sccache || echo "")
if [ -f "$CCACHE" ]; then
  CMAKE_FLAGS+=(
    -DCMAKE_C_COMPILER_LAUNCHER="$CCACHE"
    -DCMAKE_CXX_COMPILER_LAUNCHER="$CCACHE"
  )
fi

if [[ $UNAME_OS == 'Linux' ]]; then
  # Ensure we always use -std=gnu++20 on Linux
  CMAKE_FLAGS+=(-DCMAKE_CXX_EXTENSIONS=ON)
fi

if [[ $UNAME_OS == 'Darwin' ]]; then
  export CMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET:-13.0}
  CMAKE_FLAGS+=(-DCMAKE_OSX_DEPLOYMENT_TARGET="${CMAKE_OSX_DEPLOYMENT_TARGET}")
  export CFLAGS="$CFLAGS -mmacos-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 "
  export CXXFLAGS="$CXXFLAGS -mmacos-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 "
fi

mkdir -p "$BUN_DEPS_OUT_DIR"

# Only print the summary when this file is executed directly, not when sourced.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  echo "C Compiler: ${CC}"
  echo "C++ Compiler: ${CXX}"
  if [ -n "$CCACHE" ]; then
    echo "Ccache: ${CCACHE}"
  fi
  if [[ $UNAME_OS == 'Darwin' ]]; then
    echo "OSX Deployment Target: ${CMAKE_OSX_DEPLOYMENT_TARGET}"
  fi
fi
318
scripts/experimental-build.mjs
Executable file
318
scripts/experimental-build.mjs
Executable file
@@ -0,0 +1,318 @@
|
||||
#! /usr/bin/env node
|
||||
|
||||
import {} from "node:fs/promises";
|
||||
import { spawn, spawnSync } from "node:child_process";
|
||||
import { copyFileSync, existsSync, mkdirSync, mkdtempSync, readFileSync, readdirSync, writeFileSync } from "node:fs";
|
||||
import { basename, dirname, join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
|
||||
const isWindows = process.platform === "win32";
|
||||
const isMacOS = process.platform === "darwin";
|
||||
const isLinux = process.platform === "linux";
|
||||
|
||||
const cwd = dirname(import.meta.dirname);
|
||||
const spawnSyncTimeout = 1000 * 60;
|
||||
const spawnTimeout = 1000 * 60 * 3;
|
||||
|
||||
/**
 * @typedef {Object} S3UploadOptions
 * @property {string} [bucket]
 * @property {string} filename
 * @property {string} content
 * @property {Record<string, string>} [headers]
 */

/**
 * Upload a file to an S3-compatible bucket using SigV4-signed PUT.
 *
 * Reads credentials and endpoint from the environment (S3_* with AWS_*
 * fallbacks); extra options are forwarded to the signer (e.g. headers).
 *
 * @param {S3UploadOptions} options
 */
async function uploadFileToS3(options) {
  // Lazy import so the dependency is only needed when uploading.
  const { AwsV4Signer } = await import("aws4fetch");

  const { bucket, filename, content, ...extra } = options;
  const baseUrl = getEnv(["S3_ENDPOINT", "S3_BASE_URL", "AWS_ENDPOINT"], "https://s3.amazonaws.com");
  const bucketUrl = new URL(bucket || getEnv(["S3_BUCKET", "AWS_BUCKET"]), baseUrl);

  const signer = new AwsV4Signer({
    accessKeyId: getSecret(["S3_ACCESS_KEY_ID", "AWS_ACCESS_KEY_ID"]),
    secretAccessKey: getSecret(["S3_SECRET_ACCESS_KEY", "AWS_SECRET_ACCESS_KEY"]),
    url: new URL(filename, bucketUrl),
    method: "PUT",
    body: content,
    ...extra,
  });

  const { url, method, headers, body } = signer.sign();
  await fetchSafe(url, {
    method,
    headers,
    body,
  });

  console.log("Uploaded file to S3:", {
    url: `${bucketUrl}`,
    filename,
  });
}
|
||||
|
||||
/**
 * @typedef {Object} SentryRelease
 * @property {string} organizationId
 * @property {string} projectId
 * @property {string} version
 * @property {string} [url]
 * @property {string} [ref]
 * @property {string} [dateReleased]
 */

/**
 * Create a release in Sentry via its REST API.
 *
 * Requires SENTRY_AUTH_TOKEN (or SENTRY_TOKEN) in the environment; the base
 * URL defaults to sentry.io but can be overridden with SENTRY_BASE_URL.
 *
 * @param {SentryRelease} options
 * @returns {Promise<void>}
 */
async function createSentryRelease(options) {
  // Everything except the routing fields becomes the request body.
  const { organizationId, projectId, ...body } = options;

  const baseUrl = getEnv("SENTRY_BASE_URL", "https://sentry.io");
  const url = new URL(`api/0/organizations/${organizationId}/releases`, baseUrl);
  const accessToken = getSecret(["SENTRY_AUTH_TOKEN", "SENTRY_TOKEN"]);

  const release = await fetchSafe(url, {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${accessToken}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
    format: "json",
  });

  console.log("Created Sentry release:", release);
}
|
||||
|
||||
/**
 * Resolve a GitHub token: GITHUB_TOKEN from the environment, then the `gh`
 * CLI's stored credential.
 *
 * @return {string}
 * @throws {Error} when no token can be found
 */
function getGithubToken() {
  const token = getEnv("GITHUB_TOKEN", null);
  if (token) {
    return token;
  }

  const gh = which("gh");
  if (gh) {
    // BUG FIX: spawnSyncSafe returns the command's stdout as a string (or
    // undefined when it fails with throwOnError disabled) — it does NOT return
    // a { exitCode, stdout } object, so the previous destructuring always
    // yielded undefined. Also disable throwOnError so a logged-out `gh`
    // falls through to the error below instead of throwing here.
    const stdout = spawnSyncSafe(gh, ["auth", "token"], { throwOnError: false });
    if (stdout) {
      return stdout.trim();
    }
  }

  throw new Error("Failed to get GitHub token (set GITHUB_TOKEN or run `gh auth login`)");
}
|
||||
|
||||
/**
 * Fetch a required secret from the environment.
 *
 * Currently an alias for {@link getEnv}; kept separate so secret handling
 * (e.g. masking, vault lookup) can be added in one place later.
 *
 * @param {string | string[]} name
 * @return {string}
 */
function getSecret(name) {
  return getEnv(name);
}
|
||||
|
||||
/**
 * Look up the first defined environment variable among `name`.
 *
 * @param {string | string[]} name - a variable name, or a list tried in order
 * @param {string | null} [defaultValue] - returned when no variable is set;
 *   pass `null` to explicitly mark the variable as optional
 * @returns {string | null | undefined}
 * @throws {Error} when nothing is set and no default (not even null) was given
 */
function getEnv(name, defaultValue) {
  const keys = typeof name === "string" ? [name] : name;
  let result = defaultValue;

  for (const key of keys) {
    const value = process.env[key];
    if (value) {
      result = value;
      break;
    }
  }

  // `null` is a deliberate "optional" marker; undefined / "" means required-but-missing.
  if (result || result === null) {
    return result;
  }

  throw new Error(`Environment variable is required: ${name}`);
}
|
||||
|
||||
/**
 * @typedef {Object} SpawnOptions
 * @property {boolean} [throwOnError]
 * @property {string} [cwd]
 * @property {string} [env]
 * @property {string} [encoding]
 * @property {number} [timeout]
 */

/**
 * @typedef {Object} SpawnResult
 * @property {number | null} exitCode
 * @property {number | null} signalCode
 * @property {string} stdout
 * @property {string} stderr
 */

/**
 * Spawn a command asynchronously, mirroring its output to this process while
 * also capturing it for the caller.
 *
 * @param {string} command
 * @param {string[]} [args]
 * @param {SpawnOptions} [options]
 * @returns {Promise<SpawnResult>} resolves on a clean exit; rejects on failure
 *   (or resolves with `undefined` when `throwOnError` is false)
 */
async function spawnSafe(command, args = [], options = {}) {
  // NOTE: `args` now defaults to [] so the error path below (`args.join`)
  // cannot crash when the caller omits it.
  const result = new Promise((resolve, reject) => {
    let stdout = "";
    let stderr = "";
    let subprocess;
    try {
      subprocess = spawn(command, args, {
        stdio: ["ignore", "pipe", "pipe"],
        timeout: spawnTimeout,
        ...options,
      });
      subprocess.on("error", reject);
      subprocess.on("exit", (exitCode, signalCode) => {
        if (exitCode !== 0 || signalCode) {
          const reason = signalCode || `code ${exitCode}`;
          const cause = stderr || stdout;
          reject(new Error(`Process exited with ${reason}`, { cause }));
        } else {
          resolve({ exitCode, signalCode, stdout, stderr });
        }
      });
      // Stream output live while accumulating it.
      subprocess?.stdout?.on("data", chunk => {
        process.stdout.write(chunk);
        stdout += chunk.toString("utf-8");
      });
      subprocess?.stderr?.on("data", chunk => {
        process.stderr.write(chunk);
        stderr += chunk.toString("utf-8");
      });
    } catch (cause) {
      reject(cause);
    }
  });
  try {
    return await result;
  } catch (cause) {
    if (options.throwOnError === false) {
      return;
    }
    const description = `${command} ${args.join(" ")}`;
    throw new Error(`Command failed: ${description}`, { cause });
  }
}
|
||||
|
||||
/**
 * Run a command synchronously and return its stdout.
 *
 * FIXED DOC: despite the old annotation, this function does NOT return a
 * SpawnResult — it returns the captured stdout string on success, and
 * `undefined` on failure when `throwOnError` is false.
 *
 * @param {string} command
 * @param {string[]} [args]
 * @param {SpawnOptions} [options]
 * @returns {string | undefined} the command's stdout
 * @throws {Error} on spawn failure or non-zero exit (unless throwOnError is false)
 */
function spawnSyncSafe(command, args = [], options = {}) {
  // NOTE: `args` defaults to [] so the error path (`args.join`) cannot crash
  // when the caller omits it.
  try {
    const { error, status, signal, stdout, stderr } = spawnSync(command, args, {
      stdio: ["ignore", "pipe", "pipe"],
      encoding: "utf-8",
      timeout: spawnSyncTimeout,
      ...options,
    });
    if (error) {
      throw error;
    }
    if (signal || status !== 0) {
      const reason = signal || `code ${status}`;
      const cause = stderr || stdout;
      throw new Error(`Process exited with ${reason}`, { cause });
    }
    return stdout;
  } catch (cause) {
    if (options.throwOnError === false) {
      return;
    }
    const description = `${command} ${args.join(" ")}`;
    throw new Error(`Command failed: ${description}`, { cause });
  }
}
|
||||
|
||||
/**
 * @typedef {Object} FetchOptions
 * @property {string} [method]
 * @property {Record<string, string>} [headers]
 * @property {string | Uint8Array} [body]
 * @property {"json" | "text" | "bytes"} [format]
 * @property {boolean} [throwOnError]
 */

/**
 * fetch() wrapper that treats non-2xx responses as errors and optionally
 * decodes the body.
 *
 * @param {string | URL} url
 * @param {FetchOptions} [options]
 * @returns {Promise<Response | string | Uint8Array>} decoded body per
 *   `options.format`, or the raw Response; with `throwOnError: false`,
 *   may return `undefined` if fetch itself failed before a response existed
 * @throws {Error} on network failure or non-OK status (unless throwOnError is false)
 */
async function fetchSafe(url, options = {}) {
  let response;
  try {
    response = await fetch(url, options);
    if (!response.ok) {
      const { status, statusText } = response;
      // Include the body so HTTP errors are debuggable.
      const body = await response.text();
      throw new Error(`${status} ${statusText}`, { cause: body });
    }
    switch (options.format) {
      case "json":
        return await response.json();
      case "text":
        return await response.text();
      case "bytes":
        return new Uint8Array(await response.arrayBuffer());
      default:
        return response;
    }
  } catch (cause) {
    if (options.throwOnError === false) {
      return response;
    }
    throw new Error(`Fetch failed: ${url}`, { cause });
  }
}
|
||||
|
||||
/**
 * Locate a command on PATH using the platform lookup tool
 * (`where` on Windows, `which` elsewhere).
 *
 * NOTE(review): the lookup subprocess runs with ONLY PATH in its environment;
 * on Windows `where` may need SYSTEMROOT as well — confirm before relying on
 * this with a custom `path`.
 *
 * @param {string} command
 * @param {string} [path] - PATH override for the lookup
 * @returns {string | undefined} resolved path, or undefined when not found
 */
function which(command, path) {
  const tool = isWindows ? "where" : "which";
  const result = spawnSyncSafe(tool, [command], {
    throwOnError: false,
    env: {
      PATH: path || process.env.PATH,
    },
  });
  if (!result) {
    return;
  }
  if (isWindows) {
    // On Windows, `where` can print multiple matches; take the first line.
    const [first] = result.split("\r\n");
    return first;
  }
  return result.trimEnd();
}
|
||||
|
||||
/**
 * Query an executable's version string.
 *
 * Zig uses `zig version` rather than `--version`, so the argument is chosen
 * from the executable name.
 *
 * @param {string} execPath
 * @returns {string | undefined} trimmed version output, or undefined on failure
 */
function getVersion(execPath) {
  const args = /(?:zig)(?:\.exe)?/i.test(execPath) ? ["version"] : ["--version"];
  const result = spawnSyncSafe(execPath, args, { throwOnError: false });
  if (!result) {
    return;
  }
  return result.trim();
}
|
||||
@@ -1,7 +0,0 @@
|
||||
// @bun
// Used to generate a features.json file after building Bun.
// Runs under Bun itself: bun:internal-for-testing is a Bun-only module.

import { writeFileSync } from "node:fs";
import { crash_handler } from "bun:internal-for-testing";

writeFileSync("./features.json", JSON.stringify(crash_handler.getFeatureData()));
|
||||
7
scripts/internal-test.ps1
Executable file
7
scripts/internal-test.ps1
Executable file
@@ -0,0 +1,7 @@
|
||||
# Run Bun's internal test runner from packages/bun-internal-test.
$Root = (Join-Path $PSScriptRoot "../")

Push-Location (Join-Path $Root "packages\bun-internal-test")
try {
    npm i
    node src\runner.node.mjs
} finally {
    Pop-Location
}
|
||||
47
scripts/make-old-js.ps1
Executable file
47
scripts/make-old-js.ps1
Executable file
@@ -0,0 +1,47 @@
|
||||
# Builds Bun's pre-bundled JavaScript artifacts (runtime.js, the fallback
# decoder, bun-error, and node-fallbacks) using the locally installed esbuild.
$npm_client = "npm"

# & ${npm_client} i

$root = Join-Path (Split-Path -Path $MyInvocation.MyCommand.Definition -Parent) "..\"

# Resolve the esbuild binary under node_modules, preferring .cmd over .exe.
function Get-Esbuild-Path {
    param(
        $Path
    )

    $Result = Join-Path $Path "node_modules\.bin\esbuild.cmd"
    if (Test-Path $Result) {
        return $Result
    }

    return Join-Path $Path "node_modules\.bin\esbuild.exe"
}

$esbuild = Get-Esbuild-Path $root

$env:NODE_ENV = "production"

# runtime.js
Write-Host $esbuild
& ${esbuild} `
    "--target=esnext" "--bundle" `
    "src/runtime.bun.js" `
    "--format=esm" "--platform=node" "--minify" "--external:/bun:*" `
    "--outfile=src/runtime.out.js"
if ($LASTEXITCODE -ne 0) { throw "esbuild failed with exit code $LASTEXITCODE" }

# fallback_decoder
& ${esbuild} --target=esnext --bundle src/fallback.ts --format=iife --platform=browser --minify > src/fallback.out.js
# Previously only the first esbuild invocation was checked; fail fast here too.
if ($LASTEXITCODE -ne 0) { throw "esbuild failed with exit code $LASTEXITCODE" }

# bun-error
Push-Location packages\bun-error
& ${npm_client} install
& ${npm_client} run build
Pop-Location

# node-fallbacks
Push-Location src\node-fallbacks
& ${npm_client} install
& (Get-Esbuild-Path (Get-Location)) --bundle @(Get-Item .\*.js) --outdir=out --format=esm --minify --platform=browser
Pop-Location
|
||||
@@ -146,7 +146,7 @@ async function runTests() {
|
||||
},
|
||||
});
|
||||
if (!error) {
|
||||
break downloadLoop;
|
||||
break;
|
||||
}
|
||||
const { code } = error;
|
||||
if (code === "EBUSY") {
|
||||
@@ -917,21 +917,10 @@ function getRelevantTests(cwd) {
|
||||
filteredTests.push(...Array.from(smokeTests));
|
||||
console.log("Smoking tests:", filteredTests.length, "/", availableTests.length);
|
||||
} else if (maxShards > 1) {
|
||||
for (let i = 0; i < availableTests.length; i++) {
|
||||
if (i % maxShards === shardId) {
|
||||
filteredTests.push(availableTests[i]);
|
||||
}
|
||||
}
|
||||
console.log(
|
||||
"Sharding tests:",
|
||||
shardId,
|
||||
"/",
|
||||
maxShards,
|
||||
"with tests",
|
||||
filteredTests.length,
|
||||
"/",
|
||||
availableTests.length,
|
||||
);
|
||||
const firstTest = shardId * Math.ceil(availableTests.length / maxShards);
|
||||
const lastTest = Math.min(firstTest + Math.ceil(availableTests.length / maxShards), availableTests.length);
|
||||
filteredTests.push(...availableTests.slice(firstTest, lastTest));
|
||||
console.log("Sharding tests:", firstTest, "...", lastTest, "/", availableTests.length);
|
||||
} else {
|
||||
filteredTests.push(...availableTests);
|
||||
}
|
||||
|
||||
22
scripts/set-webkit-submodule-to-cmake.ps1
Executable file
22
scripts/set-webkit-submodule-to-cmake.ps1
Executable file
@@ -0,0 +1,22 @@
|
||||
# Pin the WebKit submodule to the tag declared in CMakeLists.txt.

# Navigate to the parent directory of the script
$scriptPath = Split-Path -Parent $MyInvocation.MyCommand.Path
Push-Location $scriptPath\..
try {
    # Get the WEBKIT_TAG value from CMakeLists.txt
    $WEBKIT_TAG = Select-String -Path 'CMakeLists.txt' -Pattern 'set\(WEBKIT_TAG (.*?)\)' | ForEach-Object { $_.Matches.Groups[1].Value }
    if (-not $WEBKIT_TAG) {
        Write-Host "Could not find WEBKIT_TAG in CMakeLists.txt"
        exit 1
    }

    Write-Host "Setting WebKit submodule to $WEBKIT_TAG"

    # Navigate to the WebKit submodule directory
    Set-Location src/bun.js/WebKit

    # Fetch and reset the submodule to the specified tag
    git fetch origin "$WEBKIT_TAG"
    git reset --hard "$WEBKIT_TAG"
} finally {
    Pop-Location
}
|
||||
15
scripts/set-webkit-submodule-to-cmake.sh
Executable file
15
scripts/set-webkit-submodule-to-cmake.sh
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env bash
# Pin the WebKit submodule to the tag declared in CMakeLists.txt.
set -exo pipefail

cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.."

# Extract the tag from e.g. `set(WEBKIT_TAG abc123)`.
WEBKIT_TAG=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')')
if [ -z "${WEBKIT_TAG}" ]; then
  echo "Could not find WEBKIT_TAG in CMakeLists.txt" >&2
  exit 1
fi

echo "Setting WebKit submodule to ${WEBKIT_TAG}"
cd src/bun.js/WebKit
git fetch origin "${WEBKIT_TAG}"
git reset --hard "${WEBKIT_TAG}"
|
||||
9
scripts/setup.ps1
Executable file
9
scripts/setup.ps1
Executable file
@@ -0,0 +1,9 @@
|
||||
# Windows development-environment bootstrap. Intentionally disabled: the
# throw below prevents the incomplete steps from running.
throw "This script is not yet complete";

npm i

.\scripts\update-submodules.ps1
.\scripts\all-dependencies.ps1
.\scripts\make-old-js.ps1

# Expose clang-cl under the name .vscode/clang++ for editor integration.
New-Item -Type SymbolicLink -Path .\.vscode\clang++ -Value (Get-Command clang-cl).Source
|
||||
146
scripts/setup.sh
Executable file
146
scripts/setup.sh
Executable file
@@ -0,0 +1,146 @@
|
||||
#!/usr/bin/env bash
# Development-environment setup: verifies toolchain prerequisites, records the
# chosen compilers in scripts/env.local, builds dependencies, and does a first
# debug build of bun.
set -e

C_BOLD="\e[1;1m"
C_GREEN="\e[32m"
C_RED="\e[31m"
C_BLUE="\e[34m"
C_RESET="\e[0m"

# has_exec <name>: succeed iff <name> resolves on $PATH.
has_exec() {
  which "$1" >/dev/null 2>&1 || return 1
}

# fail <message>: record a setup failure and report it on stderr; setup keeps
# going so every missing prerequisite is reported in a single run.
fail() {
  has_failure=1
  printf "${C_RED}setup error${C_RESET}: %s\n" "$@" >&2
}

if [[ $(uname -s) == 'Darwin' ]]; then
  export LLVM_VERSION=18

  # Use from brew --prefix if available
  if has_exec brew; then
    export PKG_CONFIG_PATH=$(brew --prefix)/lib/pkgconfig:$PKG_CONFIG_PATH

    # if llvm@18/bin/clang exists, use it
    if [ -x "$(brew --prefix)/opt/llvm@$LLVM_VERSION/bin/clang" ]; then
      export PATH=$(brew --prefix)/opt/llvm@$LLVM_VERSION/bin:$PATH
      export CC=$(brew --prefix)/opt/llvm@$LLVM_VERSION/bin/clang
      export CXX=$(brew --prefix)/opt/llvm@$LLVM_VERSION/bin/clang++
      export AR=$(brew --prefix)/opt/llvm@$LLVM_VERSION/bin/llvm-ar
    else
      export CC=$(which clang-$LLVM_VERSION || which clang || which cc)
      export CXX=$(which clang++-$LLVM_VERSION || which clang++ || which c++)
      export AR=$(which llvm-ar-$LLVM_VERSION || which llvm-ar || which ar)
    fi
  fi
else
  export LLVM_VERSION=16

  export CC=$(which clang-$LLVM_VERSION || which clang || which cc)
  export CXX=$(which clang++-$LLVM_VERSION || which clang++ || which c++)
  export AR=$(which llvm-ar-$LLVM_VERSION || which llvm-ar || which ar)
fi

# (The previous version repeated these two checks inside the Darwin branch;
# once here covers both platforms.)
test -n "$CC" || fail "missing LLVM $LLVM_VERSION (could not find clang)"
test -n "$CXX" || fail "missing LLVM $LLVM_VERSION (could not find clang++)"

for type in CC CXX; do
  compiler="${!type}"
  $(
    "$compiler" --version | grep "clang version ${LLVM_VERSION}." >/dev/null 2>&1
  ) || fail "LLVM ${LLVM_VERSION} is required. Detected $type as '$compiler'"
done

has_exec "bun" || fail "you need an existing copy of 'bun' in your path to build bun"
has_exec "cmake" || fail "'cmake' is missing"
has_exec "ninja" || fail "'ninja' is missing"
$(
  has_exec "rustc" &&
    (test $(cargo --version | awk '{print $2}' | cut -d. -f2) -gt 57) &&
    has_exec "cargo"
) || fail "Rust and Cargo version must be installed (minimum version 1.57)"
has_exec "go" || fail "'go' is missing"

has_exec "${PKG_CONFIG:-pkg-config}" || fail "'pkg-config' is missing"
has_exec "automake" || fail "'automake' is missing"
has_exec "perl" || fail "'perl' is missing"
has_exec "ruby" || fail "'ruby' is missing"

if [ -n "$has_failure" ]; then
  exit 1
fi

rm -f .vscode/clang++
ln -s "$CXX" .vscode/clang++

printf "All system dependencies OK\n"
printf "C Compiler for dependencies: ${CC}\n"
printf "C++ Compiler for dependencies: ${CXX}\n"

cd "$(dirname "${BASH_SOURCE[0]}")"

rm -rf env.local
echo "# Environment variables as of last setup.sh run at $(date)" >env.local
echo "export CC=\"${CC}\"" >>env.local
# BUG FIX: was `export CXX\"=${CXX}\"`, which wrote the malformed line
# `export CXX"=..."` into env.local and broke anything sourcing it.
echo "export CXX=\"${CXX}\"" >>env.local
echo "export AR=\"${AR}\"" >>env.local
echo "export PATH=\"${PATH}\"" >>env.local
echo "Saved environment variables to $(pwd)/env.local"

bash ./update-submodules.sh
bash ./all-dependencies.sh

cd ../

# Install bun dependencies
bun i
# Install test dependencies
cd test
bun i
cd ..

# TODO(@paperdave): do not use the Makefile please
has_exec "make" || fail "'make' is missing"
make runtime_js fallback_decoder bun_error node-fallbacks

mkdir -p build
rm -f build/CMakeCache.txt
cmake -B build -S . \
  -G Ninja \
  -DUSE_DEBUG_JSC=ON \
  -DCMAKE_BUILD_TYPE=Debug \
  -DCMAKE_C_COMPILER="$CC" \
  -DCMAKE_CXX_COMPILER="$CXX" \
  -UZIG_COMPILER "$*"

ninja -C build

printf "Checking if built bun functions\n"
BUN_VERSION=$(BUN_DEBUG_QUIET_LOGS=1 ./build/bun-debug --version)

printf "\n"
printf "🎉 ${C_GREEN}${C_BOLD}Development environment setup complete!${C_RESET}\n"
printf "${C_BLUE}bun v${BUN_VERSION} is located at ./build/bun-debug${C_RESET}\n"

if has_exec bun-debug; then
  bun_is_at=$(which bun-debug)
  if [ "$(realpath "$bun_is_at")" != "$(realpath "./build/bun-debug")" ]; then
    printf "\n"
    printf "${C_RED}"'Your $PATH is not configured correctly!\n'"${C_RESET}"
    printf "\n"
    printf "which bun-debug --> %s\n" "${bun_is_at}"
    printf "\n"
    printf "You should remove this binary and switch it to ./build:\n"
    printf '  export PATH="$PATH:%s"\n' $(realpath "$PWD/build")
  fi
else
  printf "\n"
  printf "You should add ./build to your path:\n"
  printf '  export PATH="$PATH:%s"\n' $(realpath "$PWD/build")
fi
printf "\n"
printf "To rebuild bun, run '${C_GREEN}bun run build${C_RESET}'\n\n"
|
||||
25
scripts/update-submodules.ps1
Executable file
25
scripts/update-submodules.ps1
Executable file
@@ -0,0 +1,25 @@
|
||||
# Initialize/update git submodules, excluding the (huge) WebKit checkout
# unless -WebKit is passed or it is already cloned.
param(
    [switch]$WebKit = $false
)

$ErrorActionPreference = 'Stop'
$ScriptDir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
Push-Location (Join-Path $ScriptDir '..')
try {
    $Names = Get-Content .gitmodules | Select-String 'path = (.*)' | ForEach-Object { $_.Matches.Groups[1].Value }

    # we will exclude webkit unless you explicitly clone it yourself (a huge download)
    if (!($WebKit) -and (-not (Test-Path "src/bun.js/WebKit/.git"))) {
        $Names = $Names | Where-Object { $_ -ne 'src/bun.js/WebKit' }
    }
    if ($env:FORCE_UPDATE_SUBMODULES -eq "1") {
        # Set --force in CI.
        git submodule update --init --recursive --progress --depth 1 --checkout --force @Names
    } else {
        git submodule update --init --recursive --progress --depth 1 --checkout @Names
    }

    if ($LASTEXITCODE -ne 0) {
        throw "git submodule update failed"
    }
} finally {
    Pop-Location
}
|
||||
25
scripts/update-submodules.sh
Executable file
25
scripts/update-submodules.sh
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
# Initialize/update git submodules, excluding the (huge) WebKit checkout
# unless --webkit is passed or it is already cloned.

set -euo pipefail

FORCE_UPDATE_SUBMODULES=${FORCE_UPDATE_SUBMODULES:-0}

cd "$(dirname "${BASH_SOURCE[0]}")"
cd ..
NAMES=$(grep 'path = ' .gitmodules | awk '{print $3}')

if ! [ "${1:-}" == '--webkit' ]; then
  # we will exclude webkit unless you explicitly clone it yourself (a huge download)
  if [ ! -e "src/bun.js/WebKit/.git" ]; then
    NAMES=$(echo "$NAMES" | grep -v 'WebKit')
  fi
fi

set -exo pipefail
# NOTE: NAMES is intentionally unquoted — it is a newline-separated list of
# submodule paths that must word-split into separate arguments.
# BUG FIX: a duplicated unconditional `git submodule update ... $NAMES` used to
# run here before the branch below, doing the whole update twice.
if [ "$FORCE_UPDATE_SUBMODULES" == "1" ]; then
  # Set --force in CI.
  git submodule update --init --recursive --progress --depth=1 --checkout --force $NAMES
else
  git submodule update --init --recursive --progress --depth=1 --checkout $NAMES
fi
|
||||
@@ -1,46 +0,0 @@
|
||||
# Ensures that commands run in a Visual Studio environment.
# This is required to run commands like cmake and ninja on Windows.

$ErrorActionPreference = "Stop"

if ($null -eq $env:VSINSTALLDIR) {
    Write-Host "Loading Visual Studio environment, this may take a second..."

    $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
    if (!(Test-Path $vswhere)) {
        throw "Command not found: vswhere (did you install Visual Studio?)"
    }

    $vsDir = (& $vswhere -prerelease -latest -property installationPath)
    if ($null -eq $vsDir) {
        # Fall back to scanning the default 2022 install location.
        $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory
        if ($null -eq $vsDir) {
            throw "Visual Studio directory not found."
        }
        $vsDir = $vsDir.FullName
    }

    Push-Location $vsDir
    try {
        $vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
        . $vsShell -Arch amd64 -HostArch amd64
    } finally {
        Pop-Location
    }
}

if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
    throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
}

# When invoked with arguments, run them as a command inside the VS environment
# and propagate its exit code.
if ($args.Count -gt 0) {
    $command = $args[0]
    $commandArgs = @()
    if ($args.Count -gt 1) {
        $commandArgs = @($args[1..($args.Count - 1)] | ForEach-Object { $_ })
    }

    Write-Host "$ $command $commandArgs"
    & $command $commandArgs
    exit $LASTEXITCODE
}
|
||||
Reference in New Issue
Block a user