Compare commits

..

6 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Jarred Sumner | a4d46fc7db | 😪 | 2022-09-25 13:08:51 -07:00 |
| Jarred Sumner | 7ce4a4e3d3 | Make Linux implementation work | 2022-09-25 10:13:42 -07:00 |
| Jarred Sumner | d2b81fa7c9 | Linux implementation | 2022-09-25 09:19:09 -07:00 |
| Jarred Sumner | a8ab899816 | wip | 2022-09-24 19:03:31 -07:00 |
| Jarred Sumner | 86efa17895 | Update package.json | 2022-09-23 09:04:04 -07:00 |
| Jarred Sumner | 68f45ef54e | 📷 | 2022-09-23 09:03:51 -07:00 |
32078 changed files with 187818 additions and 857473 deletions

27
.devcontainer/README.md Normal file
View File

@@ -0,0 +1,27 @@
# Bun's Dev Container
To get started, log in to GitHub and clone bun's GitHub repo into `/workspaces/bun`.
# First time setup
```bash
gh repo clone oven-sh/bun . -- --depth=1 --progress -j8
```
# Compile bun dependencies (zig is already compiled)
```bash
make devcontainer
```
# Build bun for development
```bash
make dev
```
# Run bun
```bash
bun-debug help
```

View File

@@ -0,0 +1,70 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
{
"name": "bun (Ubuntu)",
// Sets the run context to one level up instead of the .devcontainer folder.
"context": "..",
"hostRequirements": { "memory": "16gb" },
// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
"dockerFile": "../Dockerfile.devcontainer",
// Set *default* container specific settings.json values on container create.
"settings": {
"terminal.integrated.shell.linux": "/bin/zsh",
"zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
"zig.zigPath": "/build/zig/zig",
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"AugusteRame.zls-vscode",
"ms-vscode.cpptools",
"/home/ubuntu/vscode-zig.vsix",
"vadimcn.vscode-lldb",
"esbenp.prettier-vscode",
"xaver.clang-format"
],
"postCreateCommand": "cd /build/bun; bash /build/getting-started.sh; code /build/README.md",
"build": {
"target": "bun.devcontainer",
"cacheFrom": ["ghcr.io/oven-sh/bun.devcontainer:latest"],
"args": {
"BUILDARCH": "${localEnv:DOCKER_BUILDARCH:amd64}",
"--platform": "linux/${localEnv:DOCKER_BUILDARCH:amd64}",
"--tag": "ghcr.io/oven-sh/bun.devcontainer:latest"
}
},
"runArgs": [
"--ulimit",
"memlock=-1:-1",
"--ulimit",
"nofile=65536:65536",
"--cap-add=SYS_PTRACE",
"--security-opt",
"seccomp=unconfined"
],
"workspaceMount": "source=bun,target=/build/bun,type=volume",
"workspaceFolder": "/build/bun",
"mounts": [
"source=bun-install,target=/home/ubuntu/.bun,type=volume",
"source=bun-config,target=/home/ubuntu/.config,type=volume"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [3000, 8081, 8080]
// Uncomment the next line to run commands after the container is created - for example installing curl.
// "postCreateCommand": "apt-get update && apt-get install -y curl",
// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
// "remoteUser": "vscode"
}
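
For reference, the `build`, `runArgs`, mount, and port settings above roughly translate to the manual Docker commands below. This is a minimal sketch assuming the commands are run from the repository root (the config's `"context": ".."`); the devcontainer tooling normally issues these calls itself, and the trailing `/bin/zsh` entry command is an assumption.

```bash
# Rough manual equivalent of the devcontainer config above (a sketch, not the tooling's exact calls).
# DOCKER_BUILDARCH defaults to amd64, mirroring "${localEnv:DOCKER_BUILDARCH:amd64}".
DOCKER_BUILDARCH="${DOCKER_BUILDARCH:-amd64}"

# Build: Dockerfile.devcontainer at the repo root, target and cache registry from the "build" block.
docker build \
  --platform "linux/${DOCKER_BUILDARCH}" \
  --build-arg "BUILDARCH=${DOCKER_BUILDARCH}" \
  --target bun.devcontainer \
  --cache-from ghcr.io/oven-sh/bun.devcontainer:latest \
  --tag ghcr.io/oven-sh/bun.devcontainer:latest \
  -f Dockerfile.devcontainer .

# Run: ulimits, ptrace/seccomp options, named volumes, and forwarded ports from the config.
docker run -it \
  --ulimit memlock=-1:-1 \
  --ulimit nofile=65536:65536 \
  --cap-add=SYS_PTRACE \
  --security-opt seccomp=unconfined \
  -v bun:/build/bun \
  -v bun-install:/home/ubuntu/.bun \
  -v bun-config:/home/ubuntu/.config \
  -p 3000:3000 -p 8080:8080 -p 8081:8081 \
  -w /build/bun \
  ghcr.io/oven-sh/bun.devcontainer:latest /bin/zsh
```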

61
.devcontainer/limits.conf Normal file
View File

@@ -0,0 +1,61 @@
# /etc/security/limits.conf
#
#Each line describes a limit for a user in the form:
#
#<domain> <type> <item> <value>
#
#Where:
#<domain> can be:
# - a user name
# - a group name, with @group syntax
# - the wildcard *, for default entry
# - the wildcard %, can be also used with %group syntax,
# for maxlogin limit
# - NOTE: group and wildcard limits are not applied to root.
# To apply a limit to the root user, <domain> must be
# the literal username root.
#
#<type> can have the two values:
# - "soft" for enforcing the soft limits
# - "hard" for enforcing hard limits
#
#<item> can be one of the following:
# - core - limits the core file size (KB)
# - data - max data size (KB)
# - fsize - maximum filesize (KB)
# - memlock - max locked-in-memory address space (KB)
# - nofile - max number of open file descriptors
# - rss - max resident set size (KB)
# - stack - max stack size (KB)
# - cpu - max CPU time (MIN)
# - nproc - max number of processes
# - as - address space limit (KB)
# - maxlogins - max number of logins for this user
# - maxsyslogins - max number of logins on the system
# - priority - the priority to run user process with
# - locks - max number of file locks the user can hold
# - sigpending - max number of pending signals
# - msgqueue - max memory used by POSIX message queues (bytes)
# - nice - max nice priority allowed to raise to values: [-20, 19]
# - rtprio - max realtime priority
# - chroot - change root to directory (Debian-specific)
#
#<domain> <type> <item> <value>
#
* soft memlock 33554432
* hard memlock 33554432
* soft nofile 33554432
* hard nofile 33554432
#* soft core 0
#root hard core 100000
#* hard rss 10000
#@student hard nproc 20
#@faculty soft nproc 20
#@faculty hard nproc 50
#ftp hard nproc 0
#ftp - chroot /ftp
#@student - maxlogins 4
# End of file
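
The two uncommented entries above raise the memlock and nofile limits for every user; 33554432 KB of lockable memory is roughly 32 GiB. A quick sanity check from a fresh login shell might look like this (a sketch; `ulimit` is a standard bash builtin):

```bash
# Check the effective limits in a new login shell after limits.conf is in place.
ulimit -l   # max locked memory in KB; expect 33554432 per the entry above
ulimit -n   # max open file descriptors; expect 33554432 per the entry above
```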

View File

@@ -0,0 +1,454 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
set -e
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
USERNAME=${CURRENT_USER}
break
fi
done
if [ "${USERNAME}" = "" ]; then
USERNAME=vscode
fi
elif [ "${USERNAME}" = "none" ]; then
USERNAME=root
USER_UID=0
USER_GID=0
fi
# Load markers to see which steps have already run
if [ -f "${MARKER_FILE}" ]; then
echo "Marker file found:"
cat "${MARKER_FILE}"
source "${MARKER_FILE}"
fi
# Ensure apt is in non-interactive mode to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Function to run apt-get update if needed
apt_get_update_if_needed()
{
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Install apt-utils to avoid a debconf warning, then verify presence of other common developer tools and dependencies
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
package_list="apt-utils \
openssh-client \
gnupg2 \
dirmngr \
iproute2 \
procps \
lsof \
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim-tiny \
less \
jq \
lsb-release \
apt-transport-https \
dialog \
libc6 \
libgcc1 \
libkrb5-3 \
libgssapi-krb5-2 \
libicu[0-9][0-9] \
liblttng-ust[0-9] \
libstdc++6 \
zlib1g \
locales \
sudo \
ncdu \
man-db \
strace \
manpages \
manpages-dev \
init-system-helpers"
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
# Bring in variables from /etc/os-release like VERSION_CODENAME
. /etc/os-release
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
echo "Running apt-get update..."
apt-get update
package_list="${package_list} manpages-posix manpages-posix-dev"
else
apt_get_update_if_needed
fi
# Install libssl1.1 if available
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
package_list="${package_list} libssl1.1"
fi
# Install appropriate version of libssl1.0.x if available
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
# Debian 9
package_list="${package_list} libssl1.0.2"
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
# Ubuntu 18.04, 16.04, earlier
package_list="${package_list} libssl1.0.0"
fi
fi
echo "Packages to verify are installed: ${package_list}"
apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
# Install git if not already installed (may be more recent than distro version)
if ! type git > /dev/null 2>&1; then
apt-get -y install --no-install-recommends git
fi
PACKAGES_ALREADY_INSTALLED="true"
fi
# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
apt_get_update_if_needed
apt-get -y upgrade --no-install-recommends
apt-get autoremove -y
fi
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen
LOCALE_ALREADY_SET="true"
fi
# Create or update a non-root user to match UID/GID.
group_name="${USERNAME}"
if id -u ${USERNAME} > /dev/null 2>&1; then
# User exists, update if needed
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
group_name="$(id -gn $USERNAME)"
groupmod --gid $USER_GID ${group_name}
usermod --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
usermod --uid $USER_UID $USERNAME
fi
else
# Create user
if [ "${USER_GID}" = "automatic" ]; then
groupadd $USERNAME
else
groupadd --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" = "automatic" ]; then
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
else
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
fi
fi
# Add sudo support for non-root user
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
chmod 0440 /etc/sudoers.d/$USERNAME
EXISTING_NON_ROOT_USER="${USERNAME}"
fi
# ** Shell customization section **
if [ "${USERNAME}" = "root" ]; then
user_rc_path="/root"
else
user_rc_path="/home/${USERNAME}"
fi
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
fi
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
cp /etc/skel/.profile "${user_rc_path}/.profile"
fi
# .bashrc/.zshrc snippet
rc_snippet="$(cat << 'EOF'
if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
fi
mkdir -p "$HOME/.config/vscode-dev-containers"
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi
# Set the default git editor if not already set
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
if [ "${TERM_PROGRAM}" = "vscode" ]; then
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
export GIT_EDITOR="code-insiders --wait"
else
export GIT_EDITOR="code --wait"
fi
fi
fi
EOF
)"
# code shim; falls back to code-insiders if code is not available
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh
get_in_path_except_current() {
which -a "$1" | grep -A1 "$0" | grep -v "$0"
}
code="$(get_in_path_except_current code)"
if [ -n "$code" ]; then
exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
exec code-insiders "$@"
else
echo "code or code-insiders is not installed" >&2
exit 127
fi
EOF
chmod +x /usr/local/bin/code
# systemctl shim - tells people to use 'service' if systemd is not running
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
exec /bin/systemctl "$@"
else
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
codespaces_bash="$(cat \
<<'EOF'
# Codespaces bash prompt theme
__bash_prompt() {
local userpart='`export XIT=$? \
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
local gitbranch='`\
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
if [ "${BRANCH}" != "" ]; then \
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
echo -n " \[\033[1;33m\]✗"; \
fi \
&& echo -n "\[\033[0;36m\]) "; \
fi; \
fi`'
local lightblue='\[\033[1;34m\]'
local removecolor='\[\033[0m\]'
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
unset -f __bash_prompt
}
__bash_prompt
EOF
)"
codespaces_zsh="$(cat \
<<'EOF'
# Codespaces zsh prompt theme
__zsh_prompt() {
local prompt_username
if [ ! -z "${GITHUB_USER}" ]; then
prompt_username="@${GITHUB_USER}"
else
prompt_username="%n"
fi
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt
EOF
)"
# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/bash.bashrc
echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
if [ "${USERNAME}" != "root" ]; then
echo "${codespaces_bash}" >> "/root/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
fi
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
RC_SNIPPET_ALREADY_ADDED="true"
fi
# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
if ! type zsh > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get install -y zsh
fi
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/zsh/zshrc
ZSH_ALREADY_INSTALLED="true"
fi
# Adapted, simplified inline Oh My Zsh! install steps that add and default to a codespaces theme.
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
user_rc_file="${user_rc_path}/.zshrc"
umask g-w,o-w
mkdir -p ${oh_my_install_dir}
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
mkdir -p ${oh_my_install_dir}/custom/themes
echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
# Shrink git while still enabling updates
cd "${oh_my_install_dir}"
git repack -a -d -f --depth=1 --window=1
# Copy to non-root user if one is specified
if [ "${USERNAME}" != "root" ]; then
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
fi
fi
fi
# Persist image metadata info; install the info script if meta.env is found in the same directory
meta_info_script="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env
# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
echo "${VERSION}"
exit 0
elif [ "$1" = "release" ]; then
echo "${GIT_REPOSITORY_RELEASE}"
exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
echo "${CONTENTS_URL}"
exit 0
fi
# Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
mkdir -p /usr/local/etc/vscode-dev-containers/
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
chmod +x /usr/local/bin/devcontainer-info
fi
# Write marker file
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
echo "Done!"
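
The script's seven positional arguments follow the `Syntax:` line in its header. A hypothetical invocation (the username and UID/GID values below are illustrative, not taken from this diff) might be:

```bash
# Hypothetical usage (values are illustrative): install zsh and Oh My Zsh!, create a
# non-root user "ubuntu" with UID/GID 1000, upgrade packages, skip non-free man pages.
# Argument order: [install zsh] [username] [uid] [gid] [upgrade packages] [install Oh My Zsh!] [non-free]
sudo bash ./common-debian.sh true ubuntu 1000 1000 true true false

# If a meta.env file sat next to the script, the generated shim reports image metadata:
devcontainer-info version
```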

View File

@@ -0,0 +1,16 @@
#!/bin/bash
echo "To get started, log in to GitHub and clone bun's GitHub repo into /workspaces/bun"
echo "Make sure to log in with a Personal Access Token"
echo "# First time setup"
echo "gh auth login"
echo "gh repo clone oven-sh/bun . -- --depth=1 --progress -j8"
echo ""
echo "# Compile bun dependencies (zig is already compiled)"
echo "make devcontainer"
echo ""
echo "# Build bun for development"
echo "make dev"
echo ""
echo "# Run bun"
echo "bun-debug"

View File

@@ -0,0 +1,207 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./github-debian.sh [version]
CLI_VERSION=${1:-"latest"}
GITHUB_CLI_ARCHIVE_GPG_KEY=C99B11DEB97541F0
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com"
set -e
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" -o /tmp/vsdc-settings.env 2>/dev/null || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
# Import the specified key from the variable name passed in as the first argument
receive_gpg_keys() {
get_common_setting $1
local keys=${!1}
get_common_setting GPG_KEY_SERVERS true
# Use a temporary location for gpg keys to avoid polluting image
export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME}
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
# GPG key download sometimes fails for some reason and retrying fixes it.
local retry_count=0
local gpg_ok="false"
set +e
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
echo "(*) Downloading GPG key..."
(echo "${keys}" | xargs -n 1 gpg --recv-keys) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retrying in 10s..."
((retry_count++))
sleep 10s
fi
done
set -e
if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key."
exit 1
fi
}
# Figure out correct version if a three part version number is not passed
find_version_from_git_tags() {
local variable_name=$1
local requested_version=${!variable_name}
if [ "${requested_version}" = "none" ]; then return; fi
local repository=$2
local prefix=${3:-"tags/v"}
local separator=${4:-"."}
local last_part_optional=${5:-"false"}
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
local escaped_separator=${separator//./\\.}
local last_part
if [ "${last_part_optional}" = "true" ]; then
last_part="(${escaped_separator}[0-9]+)?"
else
last_part="${escaped_separator}[0-9]+"
fi
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
else
set +e
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
set -e
fi
fi
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" >/dev/null 2>&1; then
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
exit 1
fi
echo "${variable_name}=${!variable_name}"
}
# Import the specified key from the variable name passed in as the first argument
receive_gpg_keys() {
get_common_setting $1
local keys=${!1}
get_common_setting GPG_KEY_SERVERS true
local keyring_args=""
if [ ! -z "$2" ]; then
keyring_args="--no-default-keyring --keyring $2"
fi
# Use a temporary location for gpg keys to avoid polluting image
export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME}
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
# GPG key download sometimes fails for some reason and retrying fixes it.
local retry_count=0
local gpg_ok="false"
set +e
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
echo "(*) Downloading GPG key..."
(echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retrying in 10s..."
((retry_count++))
sleep 10s
fi
done
set -e
if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key."
exit 1
fi
}
# Function to run apt-get update if needed
apt_get_update_if_needed() {
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Checks if packages are installed and installs them if not
check_packages() {
if ! dpkg -s "$@" >/dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install --no-install-recommends "$@"
fi
}
# Fall back on direct download if no apt package exists
# Fetches .deb file to be installed with dpkg
install_deb_using_github() {
check_packages wget
arch=$(dpkg --print-architecture)
find_version_from_git_tags CLI_VERSION https://github.com/cli/cli
cli_filename="gh_${CLI_VERSION}_linux_${arch}.deb"
mkdir -p /tmp/ghcli
pushd /tmp/ghcli
wget https://github.com/cli/cli/releases/download/v${CLI_VERSION}/${cli_filename}
dpkg -i /tmp/ghcli/${cli_filename}
popd
rm -rf /tmp/ghcli
}
export DEBIAN_FRONTEND=noninteractive
# Install curl, ca-certificates, apt-transport-https, dirmngr, gnupg2, and git if missing
check_packages curl ca-certificates apt-transport-https dirmngr gnupg2
if ! type git >/dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install --no-install-recommends git
fi
# Soft version matching
if [ "${CLI_VERSION}" != "latest" ] && [ "${CLI_VERSION}" != "lts" ] && [ "${CLI_VERSION}" != "stable" ]; then
find_version_from_git_tags CLI_VERSION "https://github.com/cli/cli"
version_suffix="=${CLI_VERSION}"
else
version_suffix=""
fi
# Install the GitHub CLI
echo "Downloading GitHub CLI..."
install_deb_using_github
# Method below does not work until cli/cli#6175 is fixed
# # Import key safely (new method rather than deprecated apt-key approach) and install
# . /etc/os-release
# receive_gpg_keys GITHUB_CLI_ARCHIVE_GPG_KEY /usr/share/keyrings/githubcli-archive-keyring.gpg
# echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" >/etc/apt/sources.list.d/github-cli.list
# apt-get update
# apt-get -y install "gh${version_suffix}"
# rm -rf "/tmp/gh/gnupg"
echo "Done!"
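
Per the `Syntax:` comment at the top, the script takes an optional version argument, which `find_version_from_git_tags` soft-matches against the `cli/cli` tags before the `.deb` is downloaded. A hypothetical invocation (a sketch; the version values are illustrative):

```bash
# Hypothetical usage: install the latest GitHub CLI, or soft-match a partial version
# (e.g. "2.4" resolves to the newest 2.4.x tag of cli/cli).
sudo bash ./github-debian.sh latest
sudo bash ./github-debian.sh 2.4
```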

View File

@@ -0,0 +1,7 @@
#!/bin/bash
chsh -s $(which zsh)
sh -c "$(curl -fsSL https://starship.rs/install.sh)" -- --platform linux_musl --yes
echo "eval \"$(starship init zsh)\"" >>~/.zshrc
curl -L https://github.com/Jarred-Sumner/vscode-zig/releases/download/march18/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix

View File

@@ -0,0 +1,7 @@
#!/bin/bash
curl -L https://github.com/Jarred-Sumner/vscode-zig/releases/download/march18/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/oven-sh/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git submodule update --init --recursive --progress --depth=1
zig build -Drelease-fast
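
After the build, the binary should sit where `zigLanguageClient.path` in devcontainer.json points. A minimal sanity check (a sketch; the path comes from the settings above):

```bash
# Confirm the zls build landed at the path the devcontainer settings expect.
test -x /home/ubuntu/zls/zig-out/bin/zls && echo "zls is ready"
```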

View File

@@ -0,0 +1,9 @@
{
"folders": [
{
// Source code
"name": "bun",
"path": "bun"
},
]
}

9
.devcontainer/zls.json Normal file
View File

@@ -0,0 +1,9 @@
{
"zig_exe_path": "/build/zig/zig",
"enable_snippets": true,
"warn_style": false,
"enable_semantic_tokens": true,
"operator_completions": true,
"include_at_in_builtins": false,
"max_detail_length": 1048576
}

View File

@@ -1,8 +0,0 @@
# https://EditorConfig.org
root = true
[*]
charset = utf-8
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

47
.gitattributes vendored
View File

@@ -1,22 +1,3 @@
*.css text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.js text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.jsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.tsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.ts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.c text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.json text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
.vscode/launch.json linguist-generated
src/api/schema.d.ts linguist-generated
fixture.*.c linguist-generated
@@ -24,30 +5,4 @@ src/api/schema.js linguist-generated
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.lockb binary diff=lockb
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated
*.zig text eol=lf

44
.github/ISSUE_TEMPLATE/1-bug-report.yml vendored Normal file
View File

@@ -0,0 +1,44 @@
name: 🐛 Bug report
description: Create a report to help us improve
labels: [bug, need repro]
body:
- type: markdown
attributes:
value: |
Thank you for reporting an issue.
This issue tracker is for bugs and issues found within bun.
If you require more general support please reach out to our [discord server](https://bun.sh/discord).
Please fill in as much of the following form as you're able.
- type: input
attributes:
label: Version
description: Output of `bun -v`
- type: input
attributes:
label: Platform
description: |
UNIX: output of `uname -a`
Windows: output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in PowerShell console
- type: textarea
attributes:
label: What steps will reproduce the bug?
description: Enter details about your bug, preferably a simple code snippet that can be run using `bun` directly without installing third-party dependencies.
- type: textarea
attributes:
label: How often does it reproduce? Is there a required condition?
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide textual output instead of screenshots.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide textual output instead of screenshots.
validations:
required: true
- type: textarea
attributes:
label: Additional information
description: Tell us anything else you think we should know.

View File

@@ -1,35 +0,0 @@
name: 📥 Install Problem
description: Report an issue during install or upgrade
labels: [bug, install]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting an issue, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Please try to include as much information as possible.
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: How did you attempt to install or upgrade?
description: Please provide the commands you ran to install or upgrade.
validations:
required: true
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -1,41 +0,0 @@
name: 🐛 Bug Report
description: Report an issue that should be fixed
labels: [bug]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -0,0 +1,25 @@
name: 🚀 Feature request
description: Suggest an idea for this project
labels: [enhancement]
body:
- type: markdown
attributes:
value: |
Thank you for suggesting an idea to make bun better.
Please fill in as much of the following form as you're able.
For more information on development, please reach out to our [discord server](https://bun.sh/discord).
- type: textarea
attributes:
label: What is the problem this feature will solve?
validations:
required: true
- type: textarea
attributes:
label: What is the feature you are proposing to solve the problem?
validations:
required: true
- type: textarea
attributes:
label: What alternatives have you considered?

View File

@@ -0,0 +1,18 @@
name: 📗 Open an issue regarding the bun docs
description: Let us know about any problematic documentation
labels: [documentation]
body:
- type: markdown
attributes:
value: |
Thank you for wanting to make bun better!
Please fill in as much of the following form as you're able.
- type: input
attributes:
label: Affected URL(s)
- type: textarea
attributes:
label: Description of the problem
validations:
required: true

View File

@@ -1,24 +0,0 @@
name: 🚀 Feature Request
description: Suggest an idea, feature, or enhancement
labels: [enhancement]
body:
- type: markdown
attributes:
value: |
Thank you for submitting an idea. It helps make Bun better.
If you want to discuss Bun, or learn how others are using Bun, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback-ideas`](https://discord.gg/unwUnHBNqy) channel.
- type: textarea
attributes:
label: What is the problem this feature would solve?
validations:
required: true
- type: textarea
attributes:
label: What is the feature you are proposing to solve the problem?
validations:
required: true
- type: textarea
attributes:
label: What alternatives have you considered?

View File

@@ -1,29 +0,0 @@
name: 📗 Documentation Issue
description: Tell us if there is missing or incorrect documentation
labels: [docs]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a documentation request. It helps make Bun better.
We are working on moving documentation from the [README](https://github.com/oven-sh/bun#table-of-contents) to a documentation website. Please report as many issues or missing content requests as you can so we can incorporate them into the new documentation.
- type: dropdown
attributes:
label: What is the type of issue?
multiple: true
options:
- Documentation is missing
- Documentation is incorrect
- Documentation is confusing
- Example code is not working
- Something else
- type: textarea
attributes:
label: What is the issue?
validations:
required: true
- type: textarea
attributes:
label: Where did you find it?
description: If possible, please provide the URL(s) where you found this issue.

View File

@@ -1,5 +1,5 @@
blank_issues_enabled: true
contact_links:
- name: 💬 Ask a Question
- name: Discord server
url: https://discord.com/invite/CXdq2DP29u
about: Join our Discord server for questions, support requests, or just to chat.
about: Please visit our Discord server for questions and support requests.

52
.github/labeler.yml vendored Normal file
View File

@@ -0,0 +1,52 @@
chore:
- any: ['*', '**/*']
all: ['!packages/*', '!packages/**/*', '!src/*', '!src/**/*', '!types/*', '!types/**/*', '!test/*', '!test/**/*', '!bench/*', '!bench/**/*', '!examples/*', '!examples/**/*']
'packages:bun':
- src/*
- src/**/*
'packages:bun-darwin-aarch64':
- packages/bun-darwin-aarch64/*
- packages/bun-darwin-aarch64/**/*
'packages:bun-error':
- packages/bun-error/*
- packages/bun-error/**/*
'packages:bun-framework-next':
- packages/bun-framework-next/*
- packages/bun-framework-next/**/*
'packages:bun-landing':
- packages/bun-landing/*
- packages/bun-landing/**/*
'packages:bun-linux-x64':
- packages/bun-linux-x64/*
- packages/bun-linux-x64/**/*
'packages:bun-macro-relay':
- packages/bun-macro-relay/*
- packages/bun-macro-relay/**/*
'packages:bun-types':
- packages/bun-types/*
- packages/bun-types/**/*
- types/bun/*
'packages:bun-wasm':
- packages/bun-wasm/*
- packages/bun-wasm/**/*
'tests':
- test/*
- test/**/*
'benchmarks':
- bench/*
- bench/**/*
'templates':
- examples/*
- examples/**/*

66
.github/labels.yml vendored Normal file
View File

@@ -0,0 +1,66 @@
- name: 'bug'
color: 'd73a4a'
- name: 'segfault'
color: 'b60205'
- name: 'documentation'
color: '0075ca'
- name: 'duplicate'
color: 'cfd3d7'
- name: 'wontfix'
color: 'cfd3d7'
- name: 'question'
color: 'd876e3'
- name: 'enhancement'
color: 'a2eeef'
- name: 'good first issue'
color: '7057ff'
- name: 'help wanted'
color: '008672'
- name: 'infrastructure'
color: '7c5f8a'
- name: 'invalid'
color: 'e4e669'
- name: 'need repro'
color: 'c66037'
- name: 'node.js'
color: '0E8A16'
- name: 'napi'
color: 'BE05D2'
- name: 'esm<>cjs'
color: '7dcde3'
- name: 'performance'
color: 'E99695'
- name: 'polyfill'
color: 'f9c5e6'
- name: 'tracking'
color: '5319E7'
- name: 'transpiler'
color: 'BFDADC'
- name: 'typescript'
color: '87511b'
- name: 'chore'
color: 'cfd3d7'
- name: 'templates'
color: 'FBCA04'
- name: 'benchmarks'
color: 'FBCA04'
- name: 'tests'
color: 'FBCA04'
- name: 'packages:bun'
color: 'FBCA04'
- name: 'packages:bun-darwin-aarch64'
color: 'FBCA04'
- name: 'packages:bun-error'
color: 'FBCA04'
- name: 'packages:bun-framework-next'
color: 'FBCA04'
- name: 'packages:bun-landing'
color: 'FBCA04'
- name: 'packages:bun-linux-x64'
color: 'FBCA04'
- name: 'packages:bun-macro-relay'
color: 'FBCA04'
- name: 'packages:bun-types'
color: 'FBCA04'
- name: 'packages:bun-wasm'
color: 'FBCA04'

View File

@@ -1,62 +0,0 @@
### What does this PR do?
<!-- **Please explain what your changes do**, example: -->
<!--
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
-->
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes
### How did you verify your code works?
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
<!-- I wrote automated tests -->
<!-- If JavaScript/TypeScript modules or builtins changed:
- [ ] I ran `make js` and committed the transpiled changes
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
- [ ] I included a test for the new code, or an existing test covers it
-->
<!-- If Zig files changed:
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I or my editor ran `zig fmt` on the changed files
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
-->
<!-- If new methods, getters, or setters were added to a publicly exposed class:
- [ ] I added TypeScript types for the new methods, getters, or setters
-->
<!-- If dependencies in tests changed:
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->
<!-- If functions were added to exports.zig or bindings.zig
- [ ] I ran `make headers` to regenerate the C header file
-->
<!-- If \*.classes.ts files were added or changed:
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
-->
<!-- If a new builtin ESM/CJS module was added:
- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->

View File

@@ -1,18 +0,0 @@
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
name: Deploy site
on:
push:
paths:
- "docs/**"
branches: [main]
jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}

47
.github/workflows/bun-dockerhub.yml vendored Normal file
View File

@@ -0,0 +1,47 @@
name: bun-dockerhub
on:
push:
paths:
- dockerhub/Dockerfile
branches:
- main
pull_request:
paths:
- dockerhub/Dockerfile
branches:
- main
release:
types:
- published
jobs:
docker:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Collect metadata
id: meta
uses: docker/metadata-action@v4
with:
images: |
${{ secrets.DOCKERHUB_USERNAME }}/bun
tags: |
type=match,pattern=bun-v(\d.\d.\d),group=1
type=match,pattern=bun-v(\d.\d),group=1
type=match,pattern=bun-v(\d),group=1
type=ref,event=branch
type=ref,event=pr
- name: Login to DockerHub
if: github.event_name == 'release'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build image
uses: docker/build-push-action@v3
with:
context: ./dockerhub
push: ${{ github.event_name == 'release' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@@ -1,50 +0,0 @@
name: bun-ecosystem-test
on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem

46
.github/workflows/bun-landing.yml vendored Normal file
View File

@@ -0,0 +1,46 @@
name: bun-landing
on:
push:
paths:
- packages/bun-landing/**/*
branches: [main]
jobs:
build:
name: website build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
strategy:
fail-fast: false
matrix:
node-version:
- 16
steps:
- name: Checkout repo
uses: actions/checkout@v2
- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- name: Install bun
uses: xhyrom/setup-bun@v0.1.2
with:
bun-version: latest
- name: Install global dependencies
run: bun install
- name: Install package dependencies
run: cd packages/bun-landing && bun install
- name: Build
run: cd packages/bun-landing && bun run build.tsx
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: build:(landing) automated website build
token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,136 +0,0 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test'
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"

View File

@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -11,8 +6,7 @@ env:
on:
push:
branches:
- main
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -20,8 +14,7 @@ on:
- "Makefile"
- "Dockerfile"
pull_request:
branches:
- main
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
@@ -35,33 +28,36 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -75,19 +71,16 @@ jobs:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
cache-from: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.build_arch}}
cache-to: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.build_arch}},mode=max
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
@@ -127,6 +120,19 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- name: Release
id: release
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag_name: "canary"
files: |
${{runner.temp}}/release/bun-${{matrix.tag}}.zip
${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
@@ -135,98 +141,3 @@ jobs:
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
linux-test:
name: Tests ${{matrix.tag}}
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
sudo apt-get update && sudo apt-get install -y openssl
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1


@@ -1,9 +1,4 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -11,7 +6,7 @@ env:
on:
push:
branches: [main]
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -32,12 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -48,8 +42,8 @@ jobs:
tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -65,40 +59,21 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
tags: ghcr.io/oven-sh/bun-obj:${{github.sha}}-${{matrix.cpu}}-${{matrix.arch}}-macos
cache-from: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.arch}}-macos
cache-to: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.arch}}-macos,mode=max
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
@@ -106,18 +81,17 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -126,16 +100,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -144,7 +118,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -152,7 +126,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -172,16 +146,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -226,7 +194,7 @@ jobs:
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
mkdir -p $OBJ_DIR
make clean-bindings
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
@@ -241,23 +209,23 @@ jobs:
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -265,14 +233,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -290,15 +258,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -377,94 +340,14 @@ jobs:
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-aarch64
runner: macos-arm64
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
tag_name: "canary"
files: |
${{runner.temp}}/release/${{matrix.tag}}.zip
${{runner.temp}}/release/${{matrix.tag}}-profile.zip


@@ -1,9 +1,4 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -11,7 +6,7 @@ env:
on:
push:
branches: [main]
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -32,12 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
runs-on: zig-object
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -48,8 +42,8 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -65,40 +59,21 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
tags: ghcr.io/oven-sh/bun-obj:${{github.sha}}-${{matrix.cpu}}-${{matrix.arch}}-macos-baseline
cache-from: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.arch}}-macos-baseline
cache-to: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.arch}}-macos-baseline,mode=max
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
@@ -106,18 +81,17 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -126,16 +100,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -144,7 +118,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -152,7 +126,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -172,23 +146,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
- name: ccache (c++)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.compile_obj
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -214,7 +175,7 @@ jobs:
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
mkdir -p $BUN_DEPS_OUT_DIR
make vendor-without-check
- name: Compile C++
if: matrix.compile_obj
@@ -228,7 +189,7 @@ jobs:
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
mkdir -p $OBJ_DIR
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
if: matrix.compile_obj
@@ -242,23 +203,23 @@ jobs:
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -266,14 +227,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -291,15 +252,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -381,94 +337,14 @@ jobs:
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
tag_name: "canary"
files: |
${{runner.temp}}/release/${{matrix.tag}}.zip
${{runner.temp}}/release/${{matrix.tag}}-profile.zip


@@ -1,9 +1,4 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -11,7 +6,7 @@ env:
on:
push:
branches: [main]
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -32,12 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
@@ -48,8 +42,8 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -65,40 +59,21 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
tags: ghcr.io/oven-sh/bun-obj:${{github.sha}}-${{matrix.cpu}}-${{matrix.arch}}-macos
cache-from: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.arch}}-macos
cache-to: type=registry,ref=ghcr.io/oven-sh/bun-obj:buildcache-bust-3--${{matrix.cpu}}-${{matrix.arch}}-macos,mode=max
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
@@ -106,18 +81,17 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -126,16 +100,16 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -144,7 +118,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -152,7 +126,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -172,10 +146,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -191,18 +165,6 @@ jobs:
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
- name: ccache (c++)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.compile_obj
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
- name: Compile dependencies
if: matrix.dependencies
env:
@@ -215,7 +177,7 @@ jobs:
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
mkdir -p $BUN_DEPS_OUT_DIR
make vendor-without-check
- name: Compile C++
if: matrix.compile_obj
@@ -229,7 +191,7 @@ jobs:
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
mkdir -p $OBJ_DIR
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
if: matrix.compile_obj
@@ -247,20 +209,19 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -268,14 +229,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -293,10 +254,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -327,11 +288,6 @@ jobs:
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -383,94 +339,14 @@ jobs:
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
tag_name: "canary"
files: |
${{runner.temp}}/release/${{matrix.tag}}.zip
${{runner.temp}}/release/${{matrix.tag}}-profile.zip


@@ -1,179 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun


@@ -1,54 +0,0 @@
name: bun-release-types-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary


@@ -1,257 +0,0 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun


@@ -1,41 +0,0 @@
name: bun-types
on:
push:
paths:
- "packages/bun-types/**"
branches: [main]
pull_request:
paths:
- "packages/bun-types/**"
jobs:
tests:
name: type-tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: |
bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test

24
.github/workflows/label-sync.yml vendored Normal file

@@ -0,0 +1,24 @@
name: Label Sync
on:
schedule:
- cron: '0 0 * * *'
push:
branches:
- main
paths:
- '.github/labels.yml'
workflow_dispatch:
jobs:
label-sync:
name: Label sync
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Sync labels
uses: crazy-max/ghaction-github-labeler@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}

14
.github/workflows/labeler.yml vendored Normal file

@@ -0,0 +1,14 @@
name: 'Labeler'
on:
pull_request_target:
jobs:
labeler:
runs-on: ubuntu-latest
steps:
- name: Label PR
uses: actions/labeler@v4
with:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
sync-labels: true


@@ -1,76 +0,0 @@
name: prettier
on:
pull_request:
branches:
- main
- jarred/test-actions
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- id: install
name: Install prettier
run: bun install
- name: Run prettier
id: fmt
run: |
rm -f .failed
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed
if [ -s .failed ]; then
delimiter="$(openssl rand -hex 8)"
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
fi
- name: Comment on PR
if: steps.fmt.outputs.prettier_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
message: |
❌ @${{ github.actor }} `prettier` reported errors
```js
${{ steps.fmt.outputs.prettier_fmt_errs }}
```
To one-off fix this manually, run:
```sh
bun fmt
```
You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.prettier_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `prettier` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Fail the job
if: steps.fmt.outputs.prettier_fmt_errs != ''
run: exit 1


@@ -1,87 +0,0 @@
name: zig-fmt
env:
ZIG_VERSION: 0.12.0-dev.163+6780a6bbf
on:
pull_request:
branches:
- main
- jarred/test-actions
paths:
- "src/**/*.zig"
- "src/*.zig"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install zig
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
- name: Run zig fmt
id: fmt
run: |
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
delimiter="$(openssl rand -hex 8)"
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
if [ -s zig-fmt.err ]; then
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
fi
if [ -s zig-fmt.err2 ]; then
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
fi
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
- name: Comment on PR
if: steps.fmt.outputs.zig_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
message: |
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)
```zig
// # zig fmt --check src/*.zig src/**/*.zig
${{ steps.fmt.outputs.zig_fmt_errs }}
```
To one-off fix this manually, run:
```sh
zig fmt src/*.zig src/**/*.zig
```
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.zig_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `zig fmt` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Fail the job
if: steps.fmt.outputs.zig_fmt_errs != ''
run: exit 1

35
.gitignore vendored

@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm
*.o
*.a
profile.json
@@ -13,7 +14,7 @@ dist
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
/package-lock.json
build
*.wat
zig-out
@@ -45,7 +46,6 @@ outcss
txt.js
.idea
.vscode/cpp*
.vscode/clang*
node_modules_*
*.jsb
@@ -96,8 +96,6 @@ packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
*.bc
src/fallback.version
@@ -106,30 +104,3 @@ src/runtime.version
*.database
*.db
misctools/machbench
*.big
.eslintcache
bun-webkit
src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/test.ts
src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h
make-dev-stats.csv
.uuid
tsconfig.tsbuildinfo

29
.gitmodules vendored

@@ -36,7 +36,14 @@ shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/boringssl"]
path = src/deps/boringssl
url = https://github.com/oven-sh/boringssl.git
url = https://github.com/google/boringssl.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/libbacktrace"]
path = src/deps/libbacktrace
url = https://github.com/ianlancetaylor/libbacktrace
ignore = dirty
depth = 1
shallow = true
@@ -48,6 +55,13 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/uws"]
path = src/deps/uws
url = https://github.com/Jarred-Sumner/uWebSockets
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = true
[submodule "src/deps/tinycc"]
path = src/deps/tinycc
url = https://github.com/Jarred-Sumner/tinycc.git
@@ -55,16 +69,3 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
[submodule "src/deps/base64"]
path = src/deps/base64
url = https://github.com/aklomp/base64.git
ignore = dirty
depth = 1
shallow = true


@@ -1,13 +0,0 @@
src/fallback.html
src/bun.js/WebKit
src/js/out
src/*.out.js
src/*out.*.js
src/deps
src/test/fixtures
src/react-refresh.js
test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js


@@ -1,15 +0,0 @@
module.exports = {
arrowParens: "avoid",
printWidth: 120,
trailingComma: "all",
useTabs: false,
quoteProps: "preserve",
overrides: [
{
files: ["*.md"],
options: {
printWidth: 80,
},
},
],
};


@@ -1,21 +0,0 @@
// I would have made this a bash script but there isn't an easy way to track
// time in bash sub-second cross platform.
import fs from "fs";
const start = Date.now() + 5;
const result = Bun.spawnSync(process.argv.slice(2), {
stdio: ["inherit", "inherit", "inherit"],
});
const end = Date.now();
const diff = (Math.max(Math.round(end - start), 0) / 1000).toFixed(3);
const success = result.exitCode === 0;
try {
const line = `${new Date().toISOString()}, ${success ? "success" : "fail"}, ${diff}\n`;
if (fs.existsSync(".scripts/make-dev-stats.csv")) {
fs.appendFileSync(".scripts/make-dev-stats.csv", line);
} else {
fs.writeFileSync(".scripts/make-dev-stats.csv", line);
}
} catch {
// Ignore
}
process.exit(result.exitCode);

View File

@@ -1,13 +0,0 @@
#!/bin/bash
set -euxo pipefail
# if bun-webkit node_modules directory exists
if [ -d ./node_modules/bun-webkit ]; then
rm -f bun-webkit
# get the first matching bun-webkit-* directory name
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi
# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null

8
.scripts/write-versions.sh Executable file → Normal file
View File

@@ -7,9 +7,7 @@ LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
@@ -18,12 +16,10 @@ echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_li
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
zig fmt src/generated_versions_list.zig

View File

@@ -4,49 +4,31 @@
"name": "Mac",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/*",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/"
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/bun.js/bindings/WebCore/",
"${workspaceFolder}/src/bun.js/WebKit/Source/bmalloc/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/../webkit-build/include/"
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi"
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/*",
"${workspaceFolder}/src/bun.js/bindings/**",
"${workspaceFolder}/src/bun.js/modules/**",
"${workspaceFolder}/src/bun.js/builtins/**",
"${workspaceFolder}/src/bun.js/builtins/cpp/**",
"${workspaceFolder}/src/bun.js/WebKit/Source/bmalloc/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/../webkit-build/include/"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb"
@@ -63,9 +45,10 @@
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"compilerPath": "clang++",
"cStandard": "c17",
"cppStandard": "c++20"
"cppStandard": "c++20",
"intelliSenseMode": "macos-clang-x64"
}
],
"version": 4

View File

@@ -1,10 +1,8 @@
{
"recommendations": [
"ziglang.vscode-zig",
"AugusteRame.zls-vscode",
"esbenp.prettier-vscode",
"xaver.clang-format",
"vadimcn.vscode-lldb",
"bierner.comment-tagged-templates",
"ms-vscode.cpptools"
"vadimcn.vscode-lldb"
]
}

316
.vscode/launch.json generated vendored
View File

@@ -1,37 +1,51 @@
{
// The usage of BUN_GARBAGE_COLLECTOR_LEVEL=2 is important for debugging
// It will force the garbage collector to run after every test and every call to expect()
// it makes our tests very slow
// But it helps catch memory bugs
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "bun test [file]",
"name": "bun test",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"name": "bun test (all)",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"args": ["wiptest"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run current file",
"program": "bun-debug",
"args": ["${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run",
"program": "bun-debug",
"args": ["check.tsx", "-c"],
"cwd": "${env:HOME}/Build/react-ssr",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
@@ -39,11 +53,10 @@
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"name": "bun http example",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"args": ["run", "examples/http.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
@@ -52,107 +65,10 @@
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"name": "bun http file example",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*]",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file]",
"program": "bun-debug",
"args": ["run", "${file}", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"NODE_ENV": "development"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (gc)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (verbose)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/http-file.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
@@ -161,27 +77,16 @@
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --watch",
"name": "bun html-rewriter example",
"program": "bun-debug",
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --hot",
"program": "bun-debug",
"args": ["run", "--hot", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/html-rewriter.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -191,129 +96,6 @@
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug",
"program": "bun-debug",
"args": ["bun", "${file}"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug out.js",
"program": "bun-debug",
"args": ["--outfile=out.js", "bun", "${file}"],
"cwd": "${file}/../",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug STDOUT",
"program": "bun-debug",
"args": ["bun", "${file}"],
"cwd": "${file}/../",
"console": "internalConsole",
"env": {}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (no splitting, browser entry)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-no-split",
"--platform=browser",
"bun",
"./quick.tsx"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (splitting, rsc)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-split",
"--server-components",
"--platform=bun",
"--splitting",
"bun",
"/Users/jarred/Code/bun-rsc/components/Message.tsx",
"/Users/jarred/Code/bun-rsc/components/Button.tsx"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (NO splitting, rsc)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsccheck",
"--server-components",
"--platform=bun",
"bun",
"/Users/jarred/Code/bun-rsc/pages/index.js"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bunx debug",
"program": "bun-debug",
"args": ["--bun", "x", "tsc", "--help"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"cwd": "${fileDirname}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
}
},
{
"type": "lldb",
"request": "launch",
@@ -322,24 +104,6 @@
"args": ["https://example.com", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "Build zig unit test",
"program": "make",
"args": ["build-unit", "${file}"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "Run zig unit test",
"program": "${workspaceFolder}/zig-out/bin/test",
"args": ["abc"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
}
]
}

65
.vscode/settings.json vendored
View File

@@ -7,15 +7,10 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
// We do this until we upgrade to latest Zig so that zls doesn't break our code.
"zig.formattingProvider": "extension",
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "AugusteRame.zls-vscode",
"editor.formatOnSave": true
},
"[ts]": {
@@ -26,8 +21,6 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"zig.zls.enableInlayHints": false,
"[jsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
@@ -36,22 +29,9 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[yaml]": {
"editor.formatOnSave": true
},
"[markdown]": {
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
"comments": "off",
"strings": "off",
"other": "off"
}
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"lldb.verboseLogging": false,
"files.exclude": {
"**/.git": true,
@@ -64,6 +44,8 @@
"**/*.xcscheme": true,
"**/*.pem": true,
"**/*.xcodeproj": true,
"packages/bun-types/*.d.ts": true,
"test/snapshots": true,
"test/snapshots-no-hmr": true,
"src/bun.js/WebKit": true,
@@ -75,12 +57,7 @@
"src/deps/uws": true,
"src/deps/zlib": true,
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true,
"src/packages/bun-uws/fuzzing/seed-corpus/": true
"test/snippets/package-json-exports/_node_modules_copy": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -110,7 +87,6 @@
"editor.defaultFormatter": "xaver.clang-format"
},
"files.associations": {
"*.lock": "yarnlock",
"*.idl": "cpp",
"memory": "cpp",
"iostream": "cpp",
@@ -202,26 +178,7 @@
"ctype.h": "c",
"ethernet.h": "c",
"inet.h": "c",
"packet.h": "c",
"queue": "cpp",
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp"
"packet.h": "c"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"eslint.workingDirectories": ["packages/bun-types"],
"typescript.tsdk": "node_modules/typescript/lib"
"cmake.configureOnOpen": false
}

View File

@@ -1,76 +0,0 @@
# Contributing to Bun
> **Important:** All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
## Bun's codebase
Bun is written mostly in Zig, but WebKit & JavaScriptCore (the JavaScript engine) is written in C++.
Today (February 2023), Bun's codebase has five distinct parts:
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
- JavaScript runtime bindings ([`src/bun.js/bindings/**/*.cpp`](src/bun.js/bindings/))
- Package manager ([`src/install/`](src/install/))
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))
The JavaScript transpiler & module resolver is mostly independent of the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in [`src/js_parser.zig`](src/js_parser.zig). The JavaScript AST data structures are mostly in [`src/js_ast.zig`](src/js_ast.zig). The JavaScript lexer is in [`src/js_lexer.zig`](src/js_lexer.zig). A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.
## Getting started
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment setup!
## Memory management in Bun
For the Zig code, please:
1. Do your best to avoid dynamically allocating memory.
2. If we need to allocate memory, carefully consider the owner of that memory. If it's a JavaScript object, it will need a finalizer. If it's in Zig, it will need to be freed either via an arena or manually.
3. Prefer arenas over manual memory management. Manually freeing memory is leak & crash prone.
4. If the memory needs to be accessed across threads, use `bun.default_allocator`. Mimalloc threadlocal heaps are not safe to free across threads.
The JavaScript transpiler has special handling for memory management. The parser allocates into a single arena and the memory is recycled after each parse.
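For readers less familiar with Zig, a minimal, generic sketch of the arena pattern described above, using only the standard library's `std.heap.ArenaAllocator` (illustrative code, not Bun's):

```zig
const std = @import("std");

pub fn main() !void {
    // Everything allocated from the arena is released by a single deinit(),
    // so there is no per-allocation free to forget.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    const names = try allocator.alloc([]const u8, 3);
    names[0] = try allocator.dupe(u8, "foo");
    names[1] = try allocator.dupe(u8, "bar");
    names[2] = try allocator.dupe(u8, "baz");

    for (names) |name| std.debug.print("{s}\n", .{name});
    // No individual frees here: arena.deinit() reclaims all of it at once.
}
```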
## JavaScript runtime
Most of Bun's JavaScript runtime code lives in [`src/bun.js`](src/bun.js).
### Calling C++ from Zig & Zig from C++
TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and [`bindings.cpp`](src/bun.js/bindings/bindings.cpp) for now)
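Until that section is written, the general mechanism is ordinary C-ABI interop: the C++ side exposes `extern "C"` symbols, and the Zig side declares them with `extern fn` and calls them directly. A minimal sketch with a made-up symbol name (not one of Bun's real bindings):

```zig
// Illustration only: `Bun__Example__add` is a hypothetical symbol. Bun's real
// declarations live in bindings.zig, are implemented in bindings.cpp, and are
// resolved at link time.
extern fn Bun__Example__add(a: i32, b: i32) i32;

pub fn addViaCpp(a: i32, b: i32) i32 {
    return Bun__Example__add(a, b);
}
```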
### Adding a new JavaScript class
1. Add a new file in [`src/bun.js/*.classes.ts`](src/bun.js) to define the instance and static methods for the class.
2. Add a new file in [`src/bun.js/**/*.zig`](src/bun.js) and expose the struct in [`src/bun.js/generated_classes_list.zig`](src/bun.js/generated_classes_list.zig)
3. Run `make codegen`
Copy from examples like `Subprocess` or `Response`.
### ESM Modules and Builtins JS
Bun implements ESM modules in a mix of native code and JavaScript.
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
Builtin modules in Bun are located in [`src/js`](src/js/). These files are transpiled and support a JavaScriptCore-only syntax for internal slots, which is explained further in [`src/js/README.md`](src/js/README.md).
Native C++ modules are in `src/bun.js/modules/`.
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
### Memory management in Bun's JavaScript runtime
TODO: fill this out (for now, use `JSC.Strong` in most cases)
### Strings
TODO: fill this out (for now, use `JSValue.toSlice()` in most cases)
#### JavaScriptCore C API
Do not copy from examples leveraging the JavaScriptCore C API. Please do not use this in new code. We will not accept PRs that add new code that uses the JavaScriptCore C API.
## Testing
See [`test/README.md`](test/README.md) for information on how to run tests.

View File

@@ -7,20 +7,17 @@ ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG CPU_TARGET=native
ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=2023-aug3-5
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.12.0-dev.163+6780a6bbf"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG ZIG_URL="https://github.com/oven-sh/zig/releases/download/$ZIG_TAG/zig-linux-$BUILDARCH.zip"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.8
ARG BUN_BASE_VERSION=0.1
FROM bitnami/minideb:bullseye as bun-base
@@ -28,7 +25,7 @@ RUN install_packages ca-certificates curl wget lsb-release software-properties-c
RUN wget https://apt.llvm.org/llvm.sh && \
chmod +x llvm.sh && \
./llvm.sh 15
./llvm.sh 13
RUN install_packages \
cmake \
@@ -46,11 +43,10 @@ RUN install_packages \
rsync \
ruby \
unzip \
xz-utils \
bash tar gzip ccache
bash tar gzip
ENV CXX=clang++-15
ENV CC=clang-15
ENV CXX=clang++-13
ENV CC=clang-13
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
install_packages nodejs && \
@@ -67,15 +63,13 @@ ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_URL
ARG ZIG_URL
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ENV WEBKIT_OUT_DIR=${WEBKIT_DIR}
ENV BUILDARCH=${BUILDARCH}
ENV AR=/usr/bin/llvm-ar-15
ENV AR=/usr/bin/llvm-ar-13
ENV ZIG "${ZIG_PATH}/zig"
ENV PATH="$ZIG/bin:$PATH"
ENV LD=lld-15
ENV LD=lld-13
RUN mkdir -p $BUN_DIR $BUN_DEPS_OUT_DIR
@@ -84,8 +78,8 @@ FROM bun-base as bun-base-with-zig-and-webkit
WORKDIR $GITHUB_WORKSPACE
ADD $ZIG_URL .
RUN tar xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && mv ${ZIG_FOLDERNAME} zig;
RUN unzip -q zig-linux-$BUILDARCH.zip && \
rm zig-linux-$BUILDARCH.zip;
@@ -111,30 +105,6 @@ RUN mkdir -p ${WEBKIT_DIR} && cd ${GITHUB_WORKSPACE} && \
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/oven-sh/bun
FROM bun-base as c-ares
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make c-ares && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
FROM bun-base as lolhtml
@@ -149,15 +119,10 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-15) && cd ${BUN_DIR} && \
RUN export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-13) && cd ${BUN_DIR} && \
make lolhtml && rm -rf src/deps/lol-html Makefile
FROM bun-base as mimalloc
@@ -176,9 +141,7 @@ COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
RUN cd ${BUN_DIR} && \
make mimalloc && rm -rf src/deps/mimalloc Makefile
FROM bun-base as zlib
@@ -199,9 +162,7 @@ COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
RUN cd $BUN_DIR && \
make zlib && rm -rf src/deps/zlib Makefile
FROM bun-base as libarchive
@@ -222,14 +183,8 @@ RUN install_packages autoconf automake libtool pkg-config
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
ENV CCACHE_DIR=/ccache
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
make libarchive && rm -rf src/deps/libarchive Makefile
RUN make libarchive && rm -rf src/deps/libarchive Makefile
FROM bun-base as tinycc
@@ -246,6 +201,27 @@ ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages libtcc-dev && cp /usr/lib/$(uname -m)-linux-gnu/libtcc.a ${BUN_DEPS_OUT_DIR}
FROM bun-base as libbacktrace
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libbacktrace ${BUN_DIR}/src/deps/libbacktrace
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make libbacktrace && rm -rf src/deps/libbacktrace Makefile
FROM bun-base as boringssl
RUN install_packages golang
@@ -266,35 +242,7 @@ COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && make boringssl && rm -rf src/deps/boringssl Makefile
FROM bun-base as uws
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY packages/bun-uws ${BUN_DIR}/packages/bun-uws
COPY packages/bun-usockets ${BUN_DIR}/packages/bun-usockets
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
COPY src/deps/libuwsockets.cpp ${BUN_DIR}/src/deps/libuwsockets.cpp
COPY src/deps/_libusockets.h ${BUN_DIR}/src/deps/_libusockets.h
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make uws && rm -rf packages/bun-uws Makefile
RUN make boringssl && rm -rf src/deps/boringssl Makefile
FROM bun-base as base64
@@ -310,12 +258,36 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64
COPY src/base64 ${BUN_DIR}/src/base64
WORKDIR $BUN_DIR
RUN make base64 && rm -rf src/base64 Makefile
FROM bun-base as uws
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/uws ${BUN_DIR}/src/deps/uws
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
COPY src/deps/libuwsockets.cpp ${BUN_DIR}/src/deps/libuwsockets.cpp
COPY src/deps/_libusockets.h ${BUN_DIR}/src/deps/_libusockets.h
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make base64 && rm -rf src/deps/base64 Makefile
make uws && rm -rf src/deps/uws Makefile
FROM bun-base as picohttp
@@ -399,12 +371,11 @@ ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY ./root.zig ${BUN_DIR}/root.zig
COPY ./src ${BUN_DIR}/src
COPY ./build.zig ${BUN_DIR}/build.zig
COPY ./completions ${BUN_DIR}/completions
COPY ./packages ${BUN_DIR}/packages
COPY ./src/build-id ${BUN_DIR}/src/build-id
COPY ./build-id ${BUN_DIR}/build-id
COPY ./package.json ${BUN_DIR}/package.json
COPY ./misctools ${BUN_DIR}/misctools
COPY Makefile ${BUN_DIR}/Makefile
@@ -424,7 +395,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -443,14 +413,12 @@ ENV GIT_SHA=${GIT_SHA}
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
COPY ./src/build-id ${BUN_DIR}/src/build-id
COPY ./build-id ${BUN_DIR}/build-id
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
RUN cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
mkdir -p $BUN_RELEASE_DIR && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Doptimize=ReleaseFast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
OUTPUT_DIR=/tmp $ZIG_PATH/zig build obj -Drelease-fast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun.o /tmp/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/build-id).o && cd / && rm -rf $BUN_DIR
FROM scratch as build_release_obj
@@ -461,13 +429,12 @@ ARG ZIG_PATH
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG GIT_SHA
ARG TRIPLET
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY --from=compile_release_obj /tmp/bun-${TRIPLET}-${GIT_SHA}/*.o /
COPY --from=compile_release_obj /tmp/*.o /
FROM prepare_release as compile_cpp
@@ -483,22 +450,28 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
# Required for webcrypto bindings
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
RUN cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
make release-bindings -j10 && mv src/bun.js/bindings-obj/* /tmp
FROM bun-base as sqlite
FROM prepare_release as sqlite
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
@@ -509,48 +482,14 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
RUN cd $BUN_DIR && make sqlite
FROM scratch as build_release_cpp
@@ -570,30 +509,26 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=build_release_obj /*.o /tmp
COPY --from=build_release_cpp /*.o ${BUN_DIR}/src/bun.js/bindings-obj/
COPY --from=build_release_cpp /*.a ${BUN_DEPS_OUT_DIR}/
RUN cd $BUN_DIR && mkdir -p ${BUN_RELEASE_DIR} && make bun-relink copy-to-bun-release-dir && \
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig $(BUN_DIR)/packages

115
Dockerfile.devcontainer Normal file
View File

@@ -0,0 +1,115 @@
ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG BUILDARCH=amd64
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
FROM --platform=linux/${BUILDARCH} ubuntu:22.04 as bun.devcontainer
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
ENV PATH "$ZIG_PATH:$PATH"
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
ENV LIB_ICU_PATH ${WEBKIT_OUT_DIR}/lib
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
ENV PATH "${GITHUB_WORKSPACE}/packages/bun-linux-x64:${GITHUB_WORKSPACE}/packages/bun-linux-aarch64:${GITHUB_WORKSPACE}/packages/debug-bun-linux-x64:${GITHUB_WORKSPACE}/packages/debug-bun-linux-aarch64:$PATH"
ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
ENV BUN_INSTALL /home/ubuntu/.bun
ENV XDG_CONFIG_HOME /home/ubuntu/.config
WORKDIR ${GITHUB_WORKSPACE}
RUN apt-get update && \
apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common && \
add-apt-repository ppa:longsleep/golang-backports && \
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
chmod +x llvm.sh && \
./llvm.sh 13 && \
curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
apt-get update && \
apt-get install --no-install-recommends -y \
ca-certificates \
curl \
gnupg2 \
software-properties-common \
cmake \
build-essential \
git \
libssl-dev \
ruby \
liblld-13-dev \
libclang-13-dev \
nodejs \
gcc \
g++ \
clang-13 \
clang-format-13 \
libc++-13-dev \
libc++abi-13-dev \
lld-13 \
libicu-dev \
wget \
rustc \
cargo \
unzip \
tar \
golang-go ninja-build pkg-config automake autoconf libtool curl && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-13 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-13 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-13 90 && \
npm install -g esbuild
ENV CC=clang-13
ENV CXX=clang++-13
ENV ZIG "${ZIG_PATH}/zig"
WORKDIR $GITHUB_WORKSPACE
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
WORKDIR $GITHUB_WORKSPACE
RUN cd $GITHUB_WORKSPACE && \
curl -o zig-linux-$BUILDARCH.zip -L https://github.com/oven-sh/zig/releases/download/jul1/zig-linux-$BUILDARCH.zip && \
unzip -q zig-linux-$BUILDARCH.zip && \
rm zig-linux-$BUILDARCH.zip;
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/sept17/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-13 90
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/limits.conf /etc/security/limits.conf
COPY ".devcontainer/scripts/" /scripts/
COPY ".devcontainer/scripts/getting-started.sh" $GITHUB_WORKSPACE/getting-started.sh
COPY ".devcontainer/README.md" $GITHUB_WORKSPACE/README.md
ENV JSC_BASE_DIR=$WEBKIT_DIR
ENV WEBKIT_RELEASE_DIR=$WEBKIT_DIR
ENV WEBKIT_DEBUG_DIR=$WEBKIT_DIR
ENV WEBKIT_RELEASE_DIR_LTO=$WEBKIT_DIR
RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
bash /scripts/common-debian.sh && \
bash /scripts/github.sh && \
bash /scripts/nice.sh && \
bash /scripts/zig-env.sh
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json

847
Makefile

File diff suppressed because it is too large

3792
README.md

File diff suppressed because it is too large

View File

@@ -1,30 +0,0 @@
// https://github.com/nodejs/node/issues/34493
import { AsyncLocalStorage } from "async_hooks";
const asyncLocalStorage = new AsyncLocalStorage();
// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
let fn = () => /test/.test("test");
let runWithExpiry = async (expiry, fn) => {
let iterations = 0;
while (Date.now() < expiry) {
await fn();
iterations++;
}
return iterations;
};
console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);
let withAls;
await asyncLocalStorage.run(123, async () => {
withAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withAls} iterations (with ALS enabled)`);
});
asyncLocalStorage.disable();
let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);
console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");

Binary file not shown.

View File

@@ -1,171 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
_.log
npm-debug.log_
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# Runtime data
pids
_.pid
_.seed
\*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
\*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
\*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
\*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.\*
esbuild

View File

@@ -1,40 +0,0 @@
# Bundler benchmark
This is a performance benchmark of the following bundlers:
- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack
It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), aside from the fact that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/), with minification and source maps enabled.
To run the benchmark:
```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```
Various output will be written to the console by each bundler. Scan through the results for lines that look like this underneath each bundler output:
```sh
real <number>
user <number>
sys <number>
```
These lines are generated by the `time` command which is used to benchmark each build.
## Results
The `real` results, as run on a 16-inch M1 Macbook Pro:
| Bundler | Time |
| ------- | ------ |
| Bun | 0.17s |
| esbuild | 0.33s |
| Rollup | 18.82s |
| Webpack | 26.21s |
| Parcel | 17.95s |

Binary file not shown.

View File

@@ -1,8 +0,0 @@
{
"name": "bundle",
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.7.0"
}
}

View File

@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "bundler",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -6,4 +6,7 @@ const arg = process.argv.slice(1);
// TODO: remove Buffer.from() when readFileSync() returns Buffer
for (let i = 0; i < count; i++) console.log(arg.map(file => Buffer.from(readFileSync(file, "utf8"))).join(""));
for (let i = 0; i < count; i++)
console.log(
arg.map((file) => Buffer.from(readFileSync(file, "utf8"))).join("")
);

View File

@@ -7,8 +7,14 @@ function runner(ready) {
for (let i = 0; i < size; i++) {
rand[i] = (Math.random() * 1024 * 1024) | 0;
}
const dest = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
const src = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
const dest = `/tmp/fs-test-copy-file-${(
(Math.random() * 10000000 + 100) |
0
).toString(32)}`;
const src = `/tmp/fs-test-copy-file-${(
(Math.random() * 10000000 + 100) |
0
).toString(32)}`;
writeFileSync(src, Buffer.from(rand.buffer), { encoding: "buffer" });
const { size: fileSize } = statSync(src);
if (fileSize !== rand.byteLength) {
@@ -29,6 +35,6 @@ runner((src, dest, rand) =>
// );
// }
// }
}),
})
);
await run();

View File

@@ -1,31 +0,0 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";
export const implementations = [
{
EventEmitter: EventEmitterNative,
name: process.isBun ? (EventEmitterNative.init ? "bun" : "C++") : "node:events",
monkey: true,
},
// { EventEmitter: EventEmitter3, name: "EventEmitter3" },
].filter(Boolean);
for (const impl of implementations) {
impl.EventEmitter?.setMaxListeners?.(Infinity);
}
export function groupForEmitter(name, cb) {
if (implementations.length === 1) {
return cb({
...implementations[0],
name: `${name}: ${implementations[0].name}`,
});
} else {
return group(name, () => {
for (let impl of implementations) {
cb(impl);
}
});
}
}

View File

@@ -1,96 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("single emit", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("on x 10_000 (handler)", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
if (!called) throw new Error("not called");
});
});
// for (let { impl: EventEmitter, name, monkey } of []) {
// if (monkey) {
// var monkeyEmitter = Object.assign({}, EventEmitter.prototype);
// monkeyEmitter.on("hello", event => {
// event.preventDefault();
// });
// bench(`[monkey] ${className}.emit`, () => {
// var called = false;
// monkeyEmitter.emit("hello", {
// preventDefault() {
// id++;
// called = true;
// },
// });
// if (!called) {
// throw new Error("monkey failed");
// }
// });
// bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
// var cb = () => {
// event.preventDefault();
// };
// monkeyEmitter.on("hey", cb);
// for (let i = 0; i < 10_000; i++)
// monkey.emit("hey", {
// preventDefault() {
// id++;
// },
// });
// monkeyEmitter.off("hey", cb);
// });
// }
// }
// var target = new EventTarget();
// target.addEventListener("hello", event => {});
// bench("EventTarget.dispatch", () => {
// target.dispatchEvent(event);
// });
// var hey = new Event("hey");
// bench("EventTarget.on x 10_000 (handler)", () => {
// var handler = event => {};
// target.addEventListener("hey", handler);
// for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
// target.removeEventListener("hey", handler);
// });
await run();

View File

@@ -1,40 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("test 1", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("test 2", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
await run();

View File

@@ -1,63 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445
let rngState = 123456789;
function nextInt() {
const m = 0x80000000; // 2**31;
const a = 1103515245;
const c = 12345;
rngState = (a * rngState + c) % m;
return rngState;
}
function nextRange(start, end) {
// returns in range [start, end): including start, excluding end
// can't modulo nextInt because of weak randomness in lower bits
const rangeSize = end - start;
const randomUnder1 = nextInt() / 0x7fffffff; // 2**31 - 1
return start + Math.floor(randomUnder1 * rangeSize);
}
const chunks = new Array(1024).fill(null).map((_, j) => {
const arr = new Uint8Array(1024);
for (let i = 0; i < arr.length; i++) {
arr[i] = nextRange(0, 256);
}
return arr;
});
groupForEmitter("stream simulation", ({ EventEmitter, name }) => {
bench(name, () => {
let id = 0;
const stream = new EventEmitter();
stream.on("start", res => {
if (res.status !== 200) throw new Error("not 200");
});
const recived = [];
stream.on("data", req => {
recived.push(req);
});
stream.on("end", ev => {
ev.preventDefault();
});
// simulate a stream
stream.emit("start", { status: 200 });
for (let chunk of chunks) {
stream.emit("data", chunk);
}
stream.emit("end", {
preventDefault() {
id++;
},
});
if (id !== 1) throw new Error("not implemented right");
if (recived.length !== 1024) throw new Error("not implemented right");
});
});
await run();

View File

@@ -1,43 +0,0 @@
# expect-to-equal
To install dependencies:
```bash
bun install
```
To run in Bun:
```bash
# so it doesn't run the vitest one
bun wiptest expect-to-equal.test.js
```
To run in Jest:
```bash
# If you remove the import the performance doesn't change much
NODE_OPTIONS="--experimental-vm-modules" ./node_modules/.bin/jest expect-to-equal.test.js
```
To run in Vitest:
```bash
./node_modules/.bin/vitest --run expect-to-equal.vitest.test.js
```
Output on my machine (M1):
bun:test (bun v0.3.0):
> [36.40ms] expect().toEqual() x 10000
jest (node v18.11.0)
> expect().toEqual() x 10000: 5053 ms
vitest (node v18.11.0)
> expect().toEqual() x 10000: 401.08ms
This project was created using `bun init` in bun v0.3.0. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

Binary file not shown.

View File

@@ -1,42 +0,0 @@
// bun:test automatically rewrites this import to bun:test when run in bun
import { test, expect } from "@jest/globals";
const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {
throw new Error("Invalid RUN_COUNT");
}
const label = "expect().toEqual() x " + N;
test(label, () => {
console.time(label);
for (let runsLeft = N; runsLeft > 0; runsLeft--) {
expect("hello").toEqual("hello");
expect(123).toEqual(123);
expect({ a: 1, b: 2 }).toEqual({ b: 2, a: 1 });
expect([1, 2, 3]).toEqual([1, 2, 3]);
expect({ a: 1, b: 2 }).not.toEqual({ b: 2, a: 1, c: 3 });
expect([1, 2, 3]).not.toEqual([1, 2, 3, 4]);
expect({ a: 1, b: 2, c: 3 }).not.toEqual({ a: 1, b: 2 });
expect([1, 2, 3, 4]).not.toEqual([1, 2, 3]);
let a = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
let b = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
expect(a).toEqual(b);
expect(b).toEqual(a);
a[0].a = 2;
expect(a).not.toEqual(b);
expect(b).not.toEqual(a);
let c = { [Symbol("test")]: 1 };
let d = { [Symbol("test")]: 1 };
expect(c).not.toEqual(d);
expect(d).not.toEqual(c);
a = { a: 1, b: 2, c: 3 };
b = { a: 1, b: 2 };
expect(a).not.toEqual(b);
}
console.timeEnd(label);
});

View File

@@ -1,41 +0,0 @@
import { test, expect } from "vitest";
const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {
throw new Error("Invalid RUN_COUNT");
}
const label = "expect().toEqual() x " + N;
test(label, () => {
console.time(label);
for (let runsLeft = N; runsLeft > 0; runsLeft--) {
expect("hello").toEqual("hello");
expect(123).toEqual(123);
expect({ a: 1, b: 2 }).toEqual({ b: 2, a: 1 });
expect([1, 2, 3]).toEqual([1, 2, 3]);
expect({ a: 1, b: 2 }).not.toEqual({ b: 2, a: 1, c: 3 });
expect([1, 2, 3]).not.toEqual([1, 2, 3, 4]);
expect({ a: 1, b: 2, c: 3 }).not.toEqual({ a: 1, b: 2 });
expect([1, 2, 3, 4]).not.toEqual([1, 2, 3]);
let a = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
let b = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
expect(a).toEqual(b);
expect(b).toEqual(a);
a[0].a = 2;
expect(a).not.toEqual(b);
expect(b).not.toEqual(a);
let c = { [Symbol("test")]: 1 };
let d = { [Symbol("test")]: 1 };
expect(c).not.toEqual(d);
expect(d).not.toEqual(c);
a = { a: 1, b: 2, c: 3 };
b = { a: 1, b: 2 };
expect(a).not.toEqual(b);
}
console.timeEnd(label);
});

View File

@@ -1 +0,0 @@
console.log("Hello via Bun!");

View File

@@ -1,9 +0,0 @@
{
"name": "expect-to-equal",
"module": "index.ts",
"type": "module",
"devDependencies": {
"jest": "^29.3.1",
"vitest": "^0.25.3"
}
}

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "nodenext",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "preserve",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -1,15 +0,0 @@
import { bench, run } from "mitata";
const count = 100;
bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
}
await Promise.all(requests);
});
await run();

View File

@@ -1,15 +0,0 @@
import { bench, run } from "https://esm.run/mitata";
const count = 100;
bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
}
await Promise.all(requests);
});
await run();

View File

@@ -1,15 +0,0 @@
import { bench, run } from "mitata";
const count = 100;
bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
}
await Promise.all(requests);
});
await run();

View File

@@ -1,7 +1,8 @@
import { ptr, dlopen, CString, toBuffer } from "bun:ffi";
import { run, bench, group } from "mitata";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");
const { napiNoop, napiHash, napiString } = require(import.meta.dir +
"/src/ffi_napi_bench.node");
const {
symbols: {

View File

@@ -1,14 +1,17 @@
import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;
const path = new URL(
"src/target/release/libffi_napi_bench." + extension,
import.meta.url
).pathname;
const {
symbols: { ffi_noop, ffi_hash, ffi_string },
} = Deno.dlopen(path, {
ffi_noop: { parameters: [], result: "void" },
ffi_string: { parameters: [], result: "pointer" },
ffi_hash: { parameters: ["buffer", "u32"], result: "u32" },
ffi_hash: { parameters: ["pointer", "u32"], result: "u32" },
});
const bytes = new Uint8Array(64);
@@ -16,7 +19,9 @@ const bytes = new Uint8Array(64);
group("deno:ffi", () => {
bench("noop", () => ffi_noop());
bench("hash", () => ffi_hash(bytes, bytes.byteLength));
bench("c string", () => Deno.UnsafePointerView.getCString(ffi_string()));
bench("c string", () =>
new Deno.UnsafePointerView(ffi_string()).getCString()
);
});
await run();

Binary file not shown.

View File

@@ -29,7 +29,7 @@ loop:
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cd src; zig run -Drelease-fast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cp src/colors.css.blob $(PROJECT)/colors.css.blob
loop-emotion:
@@ -40,7 +40,7 @@ loop-emotion:
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cd src; zig run -Drelease-fast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cp src/css-in-js-styles.tsx.blob $(PROJECT)/css-in-js-styles.blob
process_video:

View File

@@ -24,7 +24,7 @@ if (process.env.PROJECT === "bun") {
// bunProcess.stderr.pipe(process.stderr);
// bunProcess.stdout.pipe(process.stdout);
bunProcess.once("error", err => {
bunProcess.once("error", (err) => {
console.error("❌ bun error", err);
process.exit(1);
});
@@ -32,15 +32,19 @@ if (process.env.PROJECT === "bun") {
bunProcess?.kill(0);
});
} else if (process.env.PROJECT === "next") {
const bunProcess = child_process.spawn("./node_modules/.bin/next", ["--port", "8080"], {
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
const bunProcess = child_process.spawn(
"./node_modules/.bin/next",
["--port", "8080"],
{
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
shell: false,
});
shell: false,
}
);
}
const delay = new Promise((resolve, reject) => {
@@ -107,8 +111,8 @@ async function main() {
return runPage();
}
main().catch(error =>
main().catch((error) =>
setTimeout(() => {
throw error;
}),
})
);

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -125,30 +125,30 @@ pub fn main() anyerror!void {
);
};
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -112,30 +112,30 @@ pub fn main() anyerror!void {
\\
++ SIMULATE_LONG_FILE;
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import "../src/index.css";
import App from "next/app";

View File

@@ -3,7 +3,6 @@ export function IndexPage() {
return (
<Main
productName={
// @ts-ignore
typeof location !== "undefined" ? location.search.substring(1) : ""
}
/>

View File

@@ -4,7 +4,9 @@ const path = require("path");
const PROJECT = process.env.PROJECT || "bun";
const percentile = require("percentile");
const PACKAGE_NAME = process.env.PACKAGE_NAME;
const label = `${PACKAGE_NAME}@${require(PACKAGE_NAME + "/package.json").version}`;
const label = `${PACKAGE_NAME}@${
require(PACKAGE_NAME + "/package.json").version
}`;
const BASEFOLDER = path.resolve(PROJECT);
const OUTFILE = path.join(process.cwd(), process.env.OUTFILE);
@@ -18,10 +20,10 @@ const TOTAL_FRAMES = VALID_TIMES.length;
const timings = fs
.readFileSync(BASEFOLDER + "/frames.all.clean", "utf8")
.split("\n")
.map(a => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter(a => parseInt(a, 10))
.filter(a => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map(num => BigInt(num));
.map((a) => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter((a) => parseInt(a, 10))
.filter((a) => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map((num) => BigInt(num));
timings.sort();
@@ -45,7 +47,7 @@ const report = {
name: PACKAGE_NAME,
version: require(PACKAGE_NAME + "/package.json").version,
},
timestamps: timings.map(a => Number(a)),
timestamps: timings.map((a) => Number(a)),
frameTimes: frameTime,
percentileMs: {
50: percentile(50, frameTime) / 10,
@@ -65,10 +67,12 @@ fs.writeFileSync(
"." +
process.env.SLEEP_INTERVAL +
"ms." +
`${process.platform}-${process.arch === "arm64" ? "aarch64" : process.arch}` +
".json",
`${process.platform}-${
process.arch === "arm64" ? "aarch64" : process.arch
}` +
".json"
),
JSON.stringify(report, null, 2),
JSON.stringify(report, null, 2)
);
console.log(
@@ -95,5 +99,9 @@ console.log(
timings.length,
"/",
TOTAL_FRAMES,
"(" + Math.round(Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100) + "%)",
"(" +
Math.round(
Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100
) +
"%)"
);
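
For reference, the `percentile` package used by this report script takes the percentile rank first and the sample array second, and the division by 10 reflects that the script appears to record frame times in tenths of a millisecond. A tiny hypothetical sketch (values invented for illustration; the original file is CommonJS and uses `require`):

```ts
import percentile from "percentile";

// Frame times in tenths of a millisecond (hypothetical values).
const frameTime = [12, 15, 11, 40, 13, 14, 90, 12];

const percentileMs = {
  50: percentile(50, frameTime) / 10,
  75: percentile(75, frameTime) / 10,
  95: percentile(95, frameTime) / 10,
};

console.log(percentileMs); // e.g. { "50": 1.3, "75": ..., "95": ... }
```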

View File

@@ -3,7 +3,10 @@ import classNames from "classnames";
import ReactDOM from "react-dom";
const Base = ({}) => {
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;
const name =
typeof location !== "undefined"
? decodeURIComponent(location.search.substring(1))
: null;
return <Main productName={name} />;
};

View File

@@ -1,11 +1,11 @@
export const Main = (props: { productName: string; cssInJS?: string }) => {
export const Main = ({ productName, cssInJS }) => {
return (
<>
<header>
<div className="Title">CSS HMR Stress Test!</div>
<p className="Description">
This page visually tests how quickly a bundler can update {props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot
Module Reloading.
This page visually tests how quickly a bundler can update{" "}
{cssInJS ? "CSS-in-JS" : "CSS"} over Hot Module Reloading.
</p>
</header>
<main className="main">
@@ -19,7 +19,9 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="ProgressBar-container">
<div className="ProgressBar"></div>
</div>
<div className="SectionLabel">The progress bar should move from left to right smoothly.</div>
<div className="SectionLabel">
The progress bar should move from left to right smoothly.
</div>
</section>
<section>
@@ -40,15 +42,21 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="Spinner"></div>
</div>
</div>
<div className="SectionLabel">The spinners should rotate &amp; change color smoothly.</div>
<div className="SectionLabel">
The spinners should rotate &amp; change color smoothly.
</div>
</section>
</main>
<footer>
<div className="SectionLabel FooterLabel">There are no CSS animations on this page.</div>
<div className="SectionLabel FooterLabel">
There are no CSS animations on this page.
</div>
<div className="Bundler-container">
<div className="Bundler">{props.productName}</div>
<div className="Bundler-updateRate">{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}</div>
<div className="Bundler">{productName}</div>
<div className="Bundler-updateRate">
{cssInJS ? "CSS-in-JS framework: " + cssInJS : ""}
</div>
</div>
</footer>
</>

View File

@@ -1,8 +1,6 @@
{
"extends": "../../../tsconfig.base.json",
"compilerOptions": {
"baseUrl": ".",
"jsx": "react-jsx",
"paths": {}
}
}

View File

@@ -1,4 +0,0 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
extends: ["@remix-run/eslint-config", "@remix-run/eslint-config/node"],
};

View File

@@ -1,10 +0,0 @@
node_modules
/.cache
/build
/public/build
.env
package-lock.json
yarn.lock
pnpm-lock.yaml
bun.lockb

View File

@@ -1,18 +0,0 @@
# `install` benchmark
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine)
```
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```
To check that the app is working as expected:
```
$ bun run dev
$ npm run dev
$ yarn dev
$ pnpm dev
```
Then visit [http://localhost:3000](http://localhost:3000).

View File

@@ -1,18 +0,0 @@
/**
* By default, Remix will handle hydrating your app on the client for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.client
*/
import { RemixBrowser } from "@remix-run/react";
import { startTransition, StrictMode } from "react";
import { hydrateRoot } from "react-dom/client";
startTransition(() => {
hydrateRoot(
document,
<StrictMode>
<RemixBrowser />
</StrictMode>,
);
});

View File

@@ -1,101 +0,0 @@
/**
* By default, Remix will handle generating the HTTP Response for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
*/
import { PassThrough } from "node:stream";
import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { renderToPipeableStream } from "react-dom/server";
const ABORT_DELAY = 5_000;
export default function handleRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return isbot(request.headers.get("user-agent"))
? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
: handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
}
function handleBotRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onAllReady() {
const body = new PassThrough();
responseHeaders.set("Content-Type", "text/html");
resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);
pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
responseStatusCode = 500;
console.error(error);
},
},
);
setTimeout(abort, ABORT_DELAY);
});
}
function handleBrowserRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onShellReady() {
const body = new PassThrough();
responseHeaders.set("Content-Type", "text/html");
resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);
pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
console.error(error);
responseStatusCode = 500;
},
},
);
setTimeout(abort, ABORT_DELAY);
});
}

View File

@@ -1,20 +0,0 @@
import { Links, LiveReload, Meta, Outlet, Scripts, ScrollRestoration } from "@remix-run/react";
export default function App() {
return (
<html lang="en">
<head>
<meta charSet="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<Meta />
<Links />
</head>
<body>
<Outlet />
<ScrollRestoration />
<Scripts />
<LiveReload />
</body>
</html>
);
}

Some files were not shown because too many files have changed in this diff.