Compare commits


2 Commits

Author          SHA1        Message                                                                                 Date
Derrick Farris  78d6743f58  fix(node-test-helpers): make sure to call done with any Errors thrown from wrapped fn   2023-01-11 20:54:40 -06:00
Derrick Farris  f8114d09fd  test(child_process): fix assert -> assert.ok                                            2023-01-11 20:52:15 -06:00
1595 changed files with 129812 additions and 313948 deletions

.devcontainer/README.md (new file, 28 lines)

@@ -0,0 +1,28 @@
# Bun's Dev Container
To get started, login to GitHub and clone bun's GitHub repo into `/build/bun`
# First time setup
```bash
gh auth login # if it fails to open a browser, use Personal Access Token instead
gh repo clone oven-sh/bun . -- --depth=1 --progress -j8
```
# Compile bun dependencies (zig is already compiled)
```bash
make devcontainer
```
# Build bun for development
```bash
make dev
```
# Run bun
```bash
bun-debug help
```
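
Aside (not part of the repository files in this diff): the README above assumes you are already inside the dev container. Below is a minimal sketch of one way to get there from a host checkout, assuming the @devcontainers/cli package and a working Docker install on the host (neither is mentioned in the diff); VS Code's "Dev Containers: Reopen in Container" command does the same thing through the UI.
```bash
# Hypothetical host-side flow, run from the repository root.
npm install -g @devcontainers/cli          # assumes Node.js is available on the host
devcontainer up --workspace-folder .       # build and start the container from .devcontainer/
devcontainer exec --workspace-folder . zsh # open a shell inside the running container
```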

@@ -0,0 +1,70 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
{
"name": "bun (Ubuntu)",
// Sets the run context to one level up instead of the .devcontainer folder.
"context": "..",
"hostRequirements": { "memory": "16gb" },
// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
"dockerFile": "../Dockerfile.devcontainer",
// Set *default* container specific settings.json values on container create.
"settings": {
"terminal.integrated.shell.linux": "/bin/zsh",
"zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
"zig.zigPath": "/build/zig/zig",
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"AugusteRame.zls-vscode",
"ms-vscode.cpptools",
"/home/ubuntu/vscode-zig.vsix",
"vadimcn.vscode-lldb",
"esbenp.prettier-vscode",
"xaver.clang-format"
],
"postCreateCommand": "cd /build/bun; bash /build/getting-started.sh; cat /build/README.md",
"build": {
"target": "bun.devcontainer",
"cacheFrom": ["ghcr.io/oven-sh/bun.devcontainer:latest"],
"args": {
"BUILDARCH": "${localEnv:DOCKER_BUILDARCH:amd64}",
"--platform": "linux/${localEnv:DOCKER_BUILDARCH:amd64}",
"--tag": "ghcr.io/oven-sh/bun.devcontainer:latest"
}
},
"runArgs": [
"--ulimit",
"memlock=-1:-1",
"--ulimit",
"nofile=65536:65536",
"--cap-add=SYS_PTRACE",
"--security-opt",
"seccomp=unconfined"
],
"workspaceMount": "source=bun,target=/build/bun,type=volume",
"workspaceFolder": "/build/bun",
"mounts": [
"source=bun-install,target=/home/ubuntu/.bun,type=volume",
"source=bun-config,target=/home/ubuntu/.config,type=volume"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [3000, 8081, 8080]
// Uncomment the next line to run commands after the container is created - for example installing curl.
// "postCreateCommand": "apt-get update && apt-get install -y curl",
// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
// "remoteUser": "vscode"
}
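
Aside (not part of the diff): for readers unfamiliar with devcontainer.json, the "build" block above maps roughly to the docker invocation sketched below. This assumes the amd64 default for DOCKER_BUILDARCH and that Dockerfile.devcontainer sits at the repository root, as the dockerFile path suggests; it is an illustration, not a command taken from the repo.
```bash
# Approximate equivalent of the "build" section, run from the repo root.
docker build \
  --file Dockerfile.devcontainer \
  --target bun.devcontainer \
  --platform linux/amd64 \
  --build-arg BUILDARCH=amd64 \
  --cache-from ghcr.io/oven-sh/bun.devcontainer:latest \
  --tag ghcr.io/oven-sh/bun.devcontainer:latest \
  .
```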

.devcontainer/limits.conf (new file, 61 lines)

@@ -0,0 +1,61 @@
# /etc/security/limits.conf
#
#Each line describes a limit for a user in the form:
#
#<domain> <type> <item> <value>
#
#Where:
#<domain> can be:
# - a user name
# - a group name, with @group syntax
# - the wildcard *, for default entry
# - the wildcard %, can be also used with %group syntax,
# for maxlogin limit
# - NOTE: group and wildcard limits are not applied to root.
# To apply a limit to the root user, <domain> must be
# the literal username root.
#
#<type> can have the two values:
# - "soft" for enforcing the soft limits
# - "hard" for enforcing hard limits
#
#<item> can be one of the following:
# - core - limits the core file size (KB)
# - data - max data size (KB)
# - fsize - maximum filesize (KB)
# - memlock - max locked-in-memory address space (KB)
# - nofile - max number of open file descriptors
# - rss - max resident set size (KB)
# - stack - max stack size (KB)
# - cpu - max CPU time (MIN)
# - nproc - max number of processes
# - as - address space limit (KB)
# - maxlogins - max number of logins for this user
# - maxsyslogins - max number of logins on the system
# - priority - the priority to run user process with
# - locks - max number of file locks the user can hold
# - sigpending - max number of pending signals
# - msgqueue - max memory used by POSIX message queues (bytes)
# - nice - max nice priority allowed to raise to values: [-20, 19]
# - rtprio - max realtime priority
# - chroot - change root to directory (Debian-specific)
#
#<domain> <type> <item> <value>
#
* soft memlock 33554432
* hard memlock 33554432
* soft nofile 33554432
* hard nofile 33554432
#* soft core 0
#root hard core 100000
#* hard rss 10000
#@student hard nproc 20
#@faculty soft nproc 20
#@faculty hard nproc 50
#ftp hard nproc 0
#ftp - chroot /ftp
#@student - maxlogins 4
# End of file
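
Aside (not part of the diff): a quick way to confirm that these memlock/nofile settings (and the container's --ulimit runArgs from devcontainer.json) are in effect is to query the shell's own limits from inside the container; a minimal sketch:
```bash
ulimit -l   # max locked-in-memory address space in KB ("unlimited" when memlock=-1:-1)
ulimit -n   # max number of open file descriptors
ulimit -a   # full table of current soft limits
```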

@@ -0,0 +1,454 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
set -e
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
USERNAME=${CURRENT_USER}
break
fi
done
if [ "${USERNAME}" = "" ]; then
USERNAME=vscode
fi
elif [ "${USERNAME}" = "none" ]; then
USERNAME=root
USER_UID=0
USER_GID=0
fi
# Load markers to see which steps have already run
if [ -f "${MARKER_FILE}" ]; then
echo "Marker file found:"
cat "${MARKER_FILE}"
source "${MARKER_FILE}"
fi
# Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Function to call apt-get if needed
apt_get_update_if_needed()
{
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
package_list="apt-utils \
openssh-client \
gnupg2 \
dirmngr \
iproute2 \
procps \
lsof \
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim-tiny \
less \
jq \
lsb-release \
apt-transport-https \
dialog \
libc6 \
libgcc1 \
libkrb5-3 \
libgssapi-krb5-2 \
libicu[0-9][0-9] \
liblttng-ust[0-9] \
libstdc++6 \
zlib1g \
locales \
sudo \
ncdu \
man-db \
strace \
manpages \
manpages-dev \
init-system-helpers"
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
# Bring in variables from /etc/os-release like VERSION_CODENAME
. /etc/os-release
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
echo "Running apt-get update..."
apt-get update
package_list="${package_list} manpages-posix manpages-posix-dev"
else
apt_get_update_if_needed
fi
# Install libssl1.1 if available
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
package_list="${package_list} libssl1.1"
fi
# Install appropriate version of libssl1.0.x if available
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
# Debian 9
package_list="${package_list} libssl1.0.2"
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
# Ubuntu 18.04, 16.04, earlier
package_list="${package_list} libssl1.0.0"
fi
fi
echo "Packages to verify are installed: ${package_list}"
apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
# Install git if not already installed (may be more recent than distro version)
if ! type git > /dev/null 2>&1; then
apt-get -y install --no-install-recommends git
fi
PACKAGES_ALREADY_INSTALLED="true"
fi
# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
apt_get_update_if_needed
apt-get -y upgrade --no-install-recommends
apt-get autoremove -y
fi
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen
LOCALE_ALREADY_SET="true"
fi
# Create or update a non-root user to match UID/GID.
group_name="${USERNAME}"
if id -u ${USERNAME} > /dev/null 2>&1; then
# User exists, update if needed
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
group_name="$(id -gn $USERNAME)"
groupmod --gid $USER_GID ${group_name}
usermod --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
usermod --uid $USER_UID $USERNAME
fi
else
# Create user
if [ "${USER_GID}" = "automatic" ]; then
groupadd $USERNAME
else
groupadd --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" = "automatic" ]; then
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
else
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
fi
fi
# Add sudo support for non-root user
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
chmod 0440 /etc/sudoers.d/$USERNAME
EXISTING_NON_ROOT_USER="${USERNAME}"
fi
# ** Shell customization section **
if [ "${USERNAME}" = "root" ]; then
user_rc_path="/root"
else
user_rc_path="/home/${USERNAME}"
fi
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
fi
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
cp /etc/skel/.profile "${user_rc_path}/.profile"
fi
# .bashrc/.zshrc snippet
rc_snippet="$(cat << 'EOF'
if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
fi
mkdir -p "$HOME/.config/vscode-dev-containers"
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi
# Set the default git editor if not already set
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
if [ "${TERM_PROGRAM}" = "vscode" ]; then
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
export GIT_EDITOR="code-insiders --wait"
else
export GIT_EDITOR="code --wait"
fi
fi
fi
EOF
)"
# code shim: falls back to code-insiders if code is not available
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh
get_in_path_except_current() {
which -a "$1" | grep -A1 "$0" | grep -v "$0"
}
code="$(get_in_path_except_current code)"
if [ -n "$code" ]; then
exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
exec code-insiders "$@"
else
echo "code or code-insiders is not installed" >&2
exit 127
fi
EOF
chmod +x /usr/local/bin/code
# systemctl shim - tells people to use 'service' if systemd is not running
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
exec /bin/systemctl "$@"
else
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
codespaces_bash="$(cat \
<<'EOF'
# Codespaces bash prompt theme
__bash_prompt() {
local userpart='`export XIT=$? \
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
local gitbranch='`\
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
if [ "${BRANCH}" != "" ]; then \
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
echo -n " \[\033[1;33m\]✗"; \
fi \
&& echo -n "\[\033[0;36m\]) "; \
fi; \
fi`'
local lightblue='\[\033[1;34m\]'
local removecolor='\[\033[0m\]'
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
unset -f __bash_prompt
}
__bash_prompt
EOF
)"
codespaces_zsh="$(cat \
<<'EOF'
# Codespaces zsh prompt theme
__zsh_prompt() {
local prompt_username
if [ ! -z "${GITHUB_USER}" ]; then
prompt_username="@${GITHUB_USER}"
else
prompt_username="%n"
fi
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt
EOF
)"
# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/bash.bashrc
echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
if [ "${USERNAME}" != "root" ]; then
echo "${codespaces_bash}" >> "/root/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
fi
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
RC_SNIPPET_ALREADY_ADDED="true"
fi
# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
if ! type zsh > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get install -y zsh
fi
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/zsh/zshrc
ZSH_ALREADY_INSTALLED="true"
fi
# Adapted, simplified inline Oh My Zsh! install steps that add and default to a codespaces theme.
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
user_rc_file="${user_rc_path}/.zshrc"
umask g-w,o-w
mkdir -p ${oh_my_install_dir}
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
mkdir -p ${oh_my_install_dir}/custom/themes
echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
# Shrink git while still enabling updates
cd "${oh_my_install_dir}"
git repack -a -d -f --depth=1 --window=1
# Copy to non-root user if one is specified
if [ "${USERNAME}" != "root" ]; then
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
fi
fi
fi
# Persist image metadata info and install a reporting script if meta.env is found in the same directory
meta_info_script="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env
# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
echo "${VERSION}"
exit 0
elif [ "$1" = "release" ]; then
echo "${GIT_REPOSITORY_RELEASE}"
exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
echo "${CONTENTS_URL}"
exit 0
fi
#Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
mkdir -p /usr/local/etc/vscode-dev-containers/
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
chmod +x /usr/local/bin/devcontainer-info
fi
# Write marker file
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
echo "Done!"

@@ -0,0 +1,16 @@
#!/bin/bash
echo "To get started, login to GitHub and clone bun's GitHub repo into /build/bun"
echo "If it fails to open a browser, login with a Personal Access Token instead"
echo "# First time setup"
echo "gh auth login"
echo "gh repo clone oven-sh/bun . -- --depth=1 --progress -j8"
echo ""
echo "# Compile bun dependencies (zig is already compiled)"
echo "make devcontainer"
echo ""
echo "# Build bun for development"
echo "make dev"
echo ""
echo "# Run bun"
echo "bun-debug"

@@ -0,0 +1,207 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./github-debian.sh [version]
CLI_VERSION=${1:-"latest"}
GITHUB_CLI_ARCHIVE_GPG_KEY=C99B11DEB97541F0
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com"
set -e
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" -o /tmp/vsdc-settings.env 2>/dev/null || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
# Import the GPG key whose ID is stored in the variable name passed in as $1
receive_gpg_keys() {
get_common_setting $1
local keys=${!1}
get_common_setting GPG_KEY_SERVERS true
# Use a temporary location for gpg keys to avoid polluting image
export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME}
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
# GPG key download sometimes fails for some reason and retrying fixes it.
local retry_count=0
local gpg_ok="false"
set +e
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
echo "(*) Downloading GPG key..."
(echo "${keys}" | xargs -n 1 gpg --recv-keys) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retring in 10s..."
((retry_count++))
sleep 10s
fi
done
set -e
if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key."
exit 1
fi
}
# Figure out correct version if a three-part version number is not passed
find_version_from_git_tags() {
local variable_name=$1
local requested_version=${!variable_name}
if [ "${requested_version}" = "none" ]; then return; fi
local repository=$2
local prefix=${3:-"tags/v"}
local separator=${4:-"."}
local last_part_optional=${5:-"false"}
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
local escaped_separator=${separator//./\\.}
local last_part
if [ "${last_part_optional}" = "true" ]; then
last_part="(${escaped_separator}[0-9]+)?"
else
last_part="${escaped_separator}[0-9]+"
fi
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
else
set +e
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
set -e
fi
fi
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" >/dev/null 2>&1; then
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
exit 1
fi
echo "${variable_name}=${!variable_name}"
}
# Import the GPG key whose ID is stored in the variable name passed in as $1
receive_gpg_keys() {
get_common_setting $1
local keys=${!1}
get_common_setting GPG_KEY_SERVERS true
local keyring_args=""
if [ ! -z "$2" ]; then
keyring_args="--no-default-keyring --keyring $2"
fi
# Use a temporary location for gpg keys to avoid polluting image
export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME}
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
# GPG key download sometimes fails for some reason and retrying fixes it.
local retry_count=0
local gpg_ok="false"
set +e
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
echo "(*) Downloading GPG key..."
(echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retring in 10s..."
((retry_count++))
sleep 10s
fi
done
set -e
if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key."
exit 1
fi
}
# Function to run apt-get if needed
apt_get_update_if_needed() {
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Checks if packages are installed and installs them if not
check_packages() {
if ! dpkg -s "$@" >/dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install --no-install-recommends "$@"
fi
}
# Fall back on direct download if no apt package exists
# Fetches .deb file to be installed with dpkg
install_deb_using_github() {
check_packages wget
arch=$(dpkg --print-architecture)
find_version_from_git_tags CLI_VERSION https://github.com/cli/cli
cli_filename="gh_${CLI_VERSION}_linux_${arch}.deb"
mkdir -p /tmp/ghcli
pushd /tmp/ghcli
wget https://github.com/cli/cli/releases/download/v${CLI_VERSION}/${cli_filename}
dpkg -i /tmp/ghcli/${cli_filename}
popd
rm -rf /tmp/ghcli
}
export DEBIAN_FRONTEND=noninteractive
# Install curl, ca-certificates, apt-transport-https, dirmngr, gnupg2, and git if missing
check_packages curl ca-certificates apt-transport-https dirmngr gnupg2
if ! type git >/dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install --no-install-recommends git
fi
# Soft version matching
if [ "${CLI_VERSION}" != "latest" ] && [ "${CLI_VERSION}" != "lts" ] && [ "${CLI_VERSION}" != "stable" ]; then
find_version_from_git_tags CLI_VERSION "https://github.com/cli/cli"
version_suffix="=${CLI_VERSION}"
else
version_suffix=""
fi
# Install the GitHub CLI
echo "Downloading github CLI..."
install_deb_using_github
# Method below does not work until cli/cli#6175 is fixed
# # Import key safely (new method rather than deprecated apt-key approach) and install
# . /etc/os-release
# receive_gpg_keys GITHUB_CLI_ARCHIVE_GPG_KEY /usr/share/keyrings/githubcli-archive-keyring.gpg
# echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" >/etc/apt/sources.list.d/github-cli.list
# apt-get update
# apt-get -y install "gh${version_suffix}"
# rm -rf "/tmp/gh/gnupg"
echo "Done!"

@@ -0,0 +1,7 @@
#!/bin/bash
chsh -s $(which zsh)
sh -c "$(curl -fsSL https://starship.rs/install.sh) -- --platform linux_musl" -- --yes
echo "eval \"$(starship init zsh)\"" >>~/.zshrc
curl https://github.com/Jarred-Sumner/vscode-zig/releases/download/march18/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix

@@ -0,0 +1,8 @@
#!/bin/bash
curl -L https://github.com/zigtools/zls-vscode/releases/download/1.1.6/zls-vscode-1.1.6.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/zigtools/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git checkout aabdb0c6ecb3c9a47feff2c2bfb9be4e95adf723
git submodule update --init --recursive --progress --depth=1
zig build -Drelease-fast

@@ -0,0 +1,9 @@
{
"folders": [
{
// Source code
"name": "bun",
"path": "bun"
},
]
}

.devcontainer/zls.json (new file, 9 lines)

@@ -0,0 +1,9 @@
{
"zig_exe_path": "/build/zig/zig",
"enable_snippets": true,
"warn_style": false,
"enable_semantic_tokens": true,
"operator_completions": true,
"include_at_in_builtins": false,
"max_detail_length": 1048576
}

.gitattributes (vendored, 23 changed lines)

@@ -8,26 +8,3 @@ src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation

@@ -1,6 +1,6 @@
name: 📗 Documentation Issue
name: 📗 Documentation Request
description: Tell us if there is missing or incorrect documentation
labels: [docs]
labels: [documentation]
body:
- type: markdown
attributes:

@@ -1,18 +0,0 @@
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
name: Deploy site
on:
push:
paths:
- "docs/**"
branches: [main]
jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}

.github/workflows/bun-dockerhub.yml (vendored, new file, 47 lines)

@@ -0,0 +1,47 @@
name: bun-dockerhub
on:
push:
paths:
- dockerhub/Dockerfile
branches:
- main
pull_request:
paths:
- dockerhub/Dockerfile
branches:
- main
release:
types:
- published
jobs:
docker:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Collect metadata
id: meta
uses: docker/metadata-action@v4
with:
images: |
${{ secrets.DOCKERHUB_USERNAME }}/bun
tags: |
type=match,pattern=bun-v(\d.\d.\d),group=1
type=match,pattern=bun-v(\d.\d),group=1
type=match,pattern=bun-v(\d),group=1
type=ref,event=branch
type=ref,event=pr
- name: Login to DockerHub
if: github.event_name == 'release'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build image
uses: docker/build-push-action@v3
with:
context: ./dockerhub
push: ${{ github.event_name == 'release' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
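
Aside (not part of the diff): the three type=match patterns above derive progressively shorter image tags from a release tag. Assuming a hypothetical release tag bun-v0.5.1, the capture groups resolve as sketched below; docker/metadata-action performs this matching itself, the sed calls only illustrate the capture group.
```bash
echo "bun-v0.5.1" | sed -E 's/^bun-v([0-9].[0-9].[0-9])$/\1/'   # -> 0.5.1
echo "bun-v0.5.1" | sed -E 's/^bun-v([0-9].[0-9]).*/\1/'        # -> 0.5
echo "bun-v0.5.1" | sed -E 's/^bun-v([0-9]).*/\1/'              # -> 0
```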

@@ -1,50 +0,0 @@
name: bun-ecosystem-test
on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem

.github/workflows/bun-homebrew.yml (vendored, new file, 30 lines)

@@ -0,0 +1,30 @@
name: bun-homebrew
on:
release:
types:
- published
- edited
jobs:
homebrew:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh' && github.event.release.published_at != null
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ env.HOMEBREW_TOKEN }}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '2.6'
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ github.event.release.tag_name }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: Release ${{ github.event.release.tag_name }}

@@ -1,136 +0,0 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test'
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"

@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -35,28 +30,34 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
@@ -127,20 +128,10 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -152,76 +143,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
linux-test:
name: Tests ${{matrix.tag}}
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- uses: actions/upload-artifact@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies

@@ -1,9 +1,4 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -32,12 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -106,18 +100,17 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -126,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -144,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -152,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -244,20 +237,19 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -265,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -341,7 +333,7 @@ jobs:
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -378,9 +370,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -392,75 +382,3 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-aarch64
runner: macos-arm64
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

@@ -1,9 +1,4 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -32,12 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
runs-on: zig-object
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -106,18 +100,17 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -126,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -144,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -152,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -245,20 +238,19 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -266,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -345,7 +337,7 @@ jobs:
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -382,9 +374,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -396,75 +386,3 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

View File

@@ -1,9 +1,4 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -32,12 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
@@ -106,18 +100,17 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -126,16 +119,16 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -144,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -152,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -247,20 +240,19 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -268,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -347,7 +339,7 @@ jobs:
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -384,9 +376,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -398,75 +388,3 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

View File

@@ -1,179 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun

View File

@@ -1,54 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary

View File

@@ -1,257 +0,0 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun

137
.github/workflows/bun-types-release.yml vendored Normal file
View File

@@ -0,0 +1,137 @@
name: Release
on:
workflow_dispatch:
jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install bun
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun install
- name: ESLint
run: bun run lint
- name: Build package
run: bun run build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error
publish-npm:
name: Publish on NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: 'https://registry.npmjs.org'
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Publish on NPM
run: cd packages/bun-types/dist/ && npm publish --access public
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: 'https://npm.pkg.github.com/'
scope: '@oven-sh'
- name: Install bun
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: dist
- name: Add scope to name
run: bun scripts/gpr.ts
- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'
# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist
# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV
# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*

View File

@@ -1,16 +1,16 @@
name: bun-types
name: TypeScript Types
on:
push:
paths:
- "packages/bun-types/**"
- packages/bun-types/**/*
branches: [main]
pull_request:
paths:
- "packages/bun-types/**"
- packages/bun-types/**/*
jobs:
tests:
name: type-tests
name: Build and Test
runs-on: ubuntu-latest
defaults:
run:
@@ -18,12 +18,13 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install bun
uses: oven-sh/setup-bun@v1
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
bun-version: latest
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
@@ -31,11 +32,13 @@ jobs:
node-version: latest
- name: Install dependencies
run: |
bun install
run: bun install
- name: Generate package
run: bun run build
- name: ESLint
run: bun run lint
- name: Tests
run: bun run test

View File

@@ -1,76 +0,0 @@
name: prettier
on:
pull_request:
branches:
- main
- jarred/test-actions
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- id: install
name: Install prettier
run: bun install
- name: Run prettier
id: fmt
run: |
rm -f .failed
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed
if [ -s .failed ]; then
delimiter="$(openssl rand -hex 8)"
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
fi
- name: Comment on PR
if: steps.fmt.outputs.prettier_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
message: |
❌ @${{ github.actor }} `prettier` reported errors
```js
${{ steps.fmt.outputs.prettier_fmt_errs }}
```
To one-off fix this manually, run:
```sh
bun fmt
```
You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.prettier_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `prettier` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Fail the job
if: steps.fmt.outputs.prettier_fmt_errs != ''
run: exit 1

View File

@@ -1,87 +0,0 @@
name: zig-fmt
env:
ZIG_VERSION: 0.11.0-dev.2571+31738de28
on:
pull_request:
branches:
- main
- jarred/test-actions
paths:
- "src/**/*.zig"
- "src/*.zig"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install zig
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
delimiter="$(openssl rand -hex 8)"
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
if [ -s zig-fmt.err ]; then
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
fi
if [ -s zig-fmt.err2 ]; then
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
fi
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
- name: Comment on PR
if: steps.fmt.outputs.zig_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
message: |
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)
```zig
// # zig fmt --check src/*.zig src/**/*.zig
${{ steps.fmt.outputs.zig_fmt_errs }}
```
To one-off fix this manually, run:
```sh
zig fmt src/*.zig src/**/*.zig
```
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.zig_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `zig fmt` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Fail the job
if: steps.fmt.outputs.zig_fmt_errs != ''
run: exit 1

17
.gitignore vendored
View File

@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm
*.o
*.a
profile.json
@@ -13,7 +14,7 @@ dist
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
/package-lock.json
build
*.wat
zig-out
@@ -45,7 +46,6 @@ outcss
txt.js
.idea
.vscode/cpp*
.vscode/clang*
node_modules_*
*.jsb
@@ -110,14 +110,3 @@ misctools/machbench
bun-webkit
src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/test.ts

4
.gitmodules vendored
View File

@@ -65,7 +65,3 @@ fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty

View File

@@ -1,13 +1,8 @@
src/fallback.html
src/bun.js/WebKit
src/js/out
src/*.out.js
src/*out.*.js
src/deps
src/test/fixtures
src/react-refresh.js
# src/test
test/bun.js/solid-dom-fixtures
test/bun.js/bundled
#src/bun.js/builtins
# src/api/demo
test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js

7
.prettierrc Normal file
View File

@@ -0,0 +1,7 @@
{
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
}

View File

@@ -1,15 +0,0 @@
module.exports = {
arrowParens: "avoid",
printWidth: 120,
trailingComma: "all",
useTabs: false,
quoteProps: "preserve",
overrides: [
{
files: "README.md",
options: {
printWidth: 80,
},
},
],
};

View File

@@ -7,7 +7,3 @@ if [ -d ./node_modules/bun-webkit ]; then
# get the first matching bun-webkit-* directory name
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi
# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null

View File

@@ -12,18 +12,19 @@
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/WebCore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
@@ -31,16 +32,15 @@
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/**",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/bun.js/builtins/**",
"${workspaceFolder}/src/bun.js/builtins/cpp/**",
"${workspaceFolder}/src/bun.js/modules/**",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb"
@@ -57,7 +57,7 @@
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"compilerPath": "/opt/homebrew/opt/llvm/bin/clang++",
"cStandard": "c17",
"cppStandard": "c++20"
}

View File

@@ -1,10 +1,8 @@
{
"recommendations": [
"ziglang.vscode-zig",
"AugusteRame.zls-vscode",
"esbenp.prettier-vscode",
"xaver.clang-format",
"vadimcn.vscode-lldb",
"bierner.comment-tagged-templates",
"ms-vscode.cpptools"
"vadimcn.vscode-lldb"
]
}

305
.vscode/launch.json generated vendored
View File

@@ -1,209 +1,105 @@
{
// The usage of BUN_GARBAGE_COLLECTOR_LEVEL=2 is important for debugging
// It will force the garbage collector to run after every test and every call to expect()
// it makes our tests very slow
// But it helps catch memory bugs
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits:
// { "initCommands": ["process handle -p false -s false -n false SIGHUP"] }
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "bun test [file]",
"name": "bun test",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}/test/bun.js",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"name": "bun test (all)",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["wiptest"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run current file",
"program": "bun-debug",
"args": ["${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (hot)",
"program": "bun-debug",
"args": ["--hot", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run",
"program": "bun-debug",
"args": ["check.tsx", "-c"],
"cwd": "${env:HOME}/Build/react-ssr",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"name": "bun http example",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/http.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"name": "bun http file example",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*]",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file]",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [Inspect]",
"program": "bun-debug",
"args": ["--inspect-brk", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (gc)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (verbose)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/http-file.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --watch",
"name": "bun html-rewriter example",
"program": "bun-debug",
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/html-rewriter.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --hot",
"program": "bun-debug",
"args": ["run", "--hot", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -218,100 +114,11 @@
"request": "launch",
"name": "bun build debug",
"program": "bun-debug",
"args": ["bun", "${file}"],
"cwd": "${workspaceFolder}",
"args": ["build", "--platform=bun", "--outdir=/tmp/testout", "${file}"],
"cwd": "${file}/../../",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
},
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits.
"initCommands": ["process handle -p false -s false -n false SIGHUP"]
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug out.js",
"program": "bun-debug",
"args": ["--outfile=out.js", "bun", "${file}"],
"cwd": "${file}/../",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug STDOUT",
"program": "bun-debug",
"args": ["bun", "${file}"],
"cwd": "${file}/../",
"console": "internalConsole",
"env": {}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (no splitting, browser entry)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-no-split",
"--platform=browser",
"bun",
"./quick.tsx"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (splitting, rsc)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-split",
"--server-components",
"--platform=bun",
"--splitting",
"bun",
"/Users/jarred/Code/bun-rsc/components/Message.tsx",
"/Users/jarred/Code/bun-rsc/components/Button.tsx"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (NO splitting, rsc)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsccheck",
"--server-components",
"--platform=bun",
"bun",
"/Users/jarred/Code/bun-rsc/pages/index.js"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{

48
.vscode/settings.json vendored
View File

@@ -13,7 +13,7 @@
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.defaultFormatter": "tiehuis.zig",
"editor.formatOnSave": true
},
"[ts]": {
@@ -24,8 +24,6 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"zig.zls.enableInlayHints": false,
"[jsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
@@ -34,22 +32,8 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[yaml]": {
"editor.formatOnSave": true
},
"[markdown]": {
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
"comments": "off",
"strings": "off",
"other": "off"
}
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"lldb.verboseLogging": false,
"files.exclude": {
"**/.git": true,
@@ -75,9 +59,7 @@
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true
"test/snippets/package-json-exports/_node_modules_copy": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -200,25 +182,9 @@
"ethernet.h": "c",
"inet.h": "c",
"packet.h": "c",
"queue": "cpp",
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp"
"queue": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"eslint.workingDirectories": ["packages/bun-types"],
"typescript.tsdk": "node_modules/typescript/lib"
"C_Cpp.errorSquiggles": "Enabled",
"eslint.workingDirectories": ["packages/bun-types"]
}

View File

@@ -1,92 +0,0 @@
# Contributing to Bun
> **Important:** All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
## Bun's codebase
Bun is written mostly in Zig, but WebKit & JavaScriptCore (the JavaScript engine) are written in C++.
Today (February 2023), Bun's codebase has five distinct parts:
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
- JavaScript runtime bindings ([`src/bun.js/bindings/**/*.cpp`](src/bun.js/bindings/))
- Package manager ([`src/install/`](src/install/))
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))
The JavaScript transpiler & module resolver is mostly independent from the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in [`src/js_parser.zig`](src/js_parser.zig). The JavaScript AST data structures are mostly in [`src/js_ast.zig`](src/js_ast.zig). The JavaScript lexer is in [`src/js_lexer.zig`](src/js_lexer.zig). A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.
## Getting started
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment set up!
## Memory management in Bun
For the Zig code, please:
1. Do your best to avoid dynamically allocating memory.
2. If we need to allocate memory, carefully consider the owner of that memory. If it's a JavaScript object, it will need a finalizer. If it's in Zig, it will need to be freed either via an arena or manually.
3. Prefer arenas over manual memory management. Manually freeing memory is leak & crash prone.
4. If the memory needs to be accessed across threads, use `bun.default_allocator`. Mimalloc threadlocal heaps are not safe to free across threads.
The JavaScript transpiler has special handling for memory management. The parser allocates into a single arena, and the memory is recycled after each parse.
## JavaScript runtime
Most of Bun's JavaScript runtime code lives in [`src/bun.js`](src/bun.js).
### Calling C++ from Zig & Zig from C++
TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and [`bindings.cpp`](src/bun.js/bindings/bindings.cpp) for now)
### Adding a new JavaScript class
1. Add a new file in [`src/bun.js/*.classes.ts`](src/bun.js) to define the instance and static methods for the class.
2. Add a new file in [`src/bun.js/**/*.zig`](src/bun.js) and expose the struct in [`src/bun.js/generated_classes_list.zig`](src/bun.js/generated_classes_list.zig)
3. Run `make codegen`
Copy from examples like `Subprocess` or `Response`.
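For orientation, here is a hypothetical sketch of what a `*.classes.ts` definition can look like. The `define` helper, its import path, and the exact field names are assumptions inferred from the steps above, so copy the schema from an existing definition such as `Subprocess` rather than from this sketch.
```ts
// Hypothetical sketch only — the real schema lives in the codegen scripts.
// `make codegen` turns these definitions into Zig/C++ glue; the Zig struct with
// the matching name (registered in generated_classes_list.zig) implements the
// methods referenced here.
import { define } from "./scripts/class-definitions"; // assumed import path

export default [
  define({
    name: "Thing", // assumed: must match the Zig struct name
    construct: true, // generate constructor bindings
    finalize: true, // generate a finalizer so Zig can free its memory
    proto: {
      // instance method backed by a `doWork` function on the Zig struct
      doWork: { fn: "doWork", length: 1 },
      // read-only property backed by a `getSize` getter in Zig
      size: { getter: "getSize" },
    },
    klass: {}, // static methods/properties, if any
  }),
];
```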
### ESM modules
Bun implements ESM modules in a mix of native code and JavaScript.
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
The ESM modules in Bun are located in [`src/bun.js/*.exports.js`](src/bun.js/). Unlike other code in Bun, these files are NOT transpiled; they are loaded directly into the JavaScriptCore VM, which means `require` does not work in these files. Instead, you must use `import.meta.require` or, ideally, not require/import other files at all.
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
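As a concrete illustration of the `import.meta.require` constraint, here is a minimal sketch of what one of these untranspiled modules might look like (the real files are plain JavaScript; the module and class here are invented for the example):
```ts
// Hypothetical sketch of a polyfill module in src/bun.js/*.exports.js.
// These files are loaded directly by JavaScriptCore without transpilation,
// so CommonJS `require` is unavailable; `import.meta.require` is used instead.
const { EventEmitter } = import.meta.require("node:events");

class TinyStream extends EventEmitter {
  push(chunk: unknown) {
    // Re-emit the chunk so consumers can listen with `.on("data", ...)`.
    this.emit("data", chunk);
    return true;
  }
}

export { TinyStream };
```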
### JavaScript Builtins
TODO: update this with the new build process that uses TypeScript and `$` instead of `@`.
JavaScript builtins are located in [`src/js/builtins/*.ts`](src/js/builtins).
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
They cannot use or reference ES modules. The files that end with `*Internals.js` are automatically loaded globally. Most usage of internals right now is in the stream implementations (which share a lot of code with Safari/WebKit) and ImportMetaObject (which is how `require` is implemented in the runtime).
To regenerate the builtins:
```sh
make clean-bindings && make generate-builtins && make bindings -j10
```
It is recommended that you have ccache installed or else you will spend a lot of time waiting for the bindings to compile.
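To make the internal-slot syntax above concrete, here is a hypothetical sketch of a builtin using the newer `$` prefix mentioned in the TODO; the function is invented for illustration and only has meaning inside the builtins build:
```ts
// Hypothetical builtin sketch for src/js/builtins/ (illustration only).
// `$Array` is an internal slot: it always refers to the original Array
// constructor, so user code that overwrites globalThis.Array cannot change
// what this builtin does. Such names must be allow-listed in
// BunBuiltinNames.h (JavaScriptCore allow-lists some of them by default).
export function createFixedArray(length: number) {
  "use strict";
  return new $Array(length);
}
```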
### Memory management in Bun's JavaScript runtime
TODO: fill this out (for now, use `JSC.Strong` in most cases)
### Strings
TODO: fill this out (for now, use `JSValue.toSlice()` in most cases)
#### JavaScriptCore C API
Do not copy from examples that use the JavaScriptCore C API, and do not use it in new code. We will not accept PRs that add new code that uses the JavaScriptCore C API.
## Testing
See [`test/README.md`](test/README.md) for information on how to run tests.

View File

@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=may20
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG ZIG_VERSION="0.11.0-dev.947+cf822c6dd"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -20,7 +20,7 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.6
ARG BUN_BASE_VERSION=0.4
FROM bitnami/minideb:bullseye as bun-base
@@ -122,8 +122,8 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
@@ -149,9 +149,6 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
@@ -306,6 +303,8 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
@@ -402,7 +401,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -427,7 +425,7 @@ ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
mkdir -p $BUN_RELEASE_DIR && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Doptimize=ReleaseFast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Drelease-fast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
FROM scratch as build_release_obj
@@ -461,20 +459,19 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
# Required for webcrypto bindings
# Required for `make webcrypto`
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
make release-bindings -j10 && mv src/bun.js/bindings-obj/* /tmp
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && make webcrypto && \
make release-bindings -j10 && mv ${BUN_DEPS_OUT_DIR}/libwebcrypto.a /tmp && mv src/bun.js/bindings-obj/* /tmp
FROM bun-base as sqlite
@@ -487,14 +484,10 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -503,36 +496,10 @@ ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
FROM scratch as build_release_cpp
COPY --from=compile_cpp /tmp/*.o /
COPY --from=compile_cpp /tmp/libwebcrypto.a /
FROM prepare_release as build_release
@@ -548,7 +515,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -562,7 +528,6 @@ COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/

126
Dockerfile.devcontainer Normal file
View File

@@ -0,0 +1,126 @@
ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG BUILDARCH=amd64
ARG ARCH=x86_64
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.947+cf822c6dd"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
FROM --platform=linux/${BUILDARCH} ubuntu:22.04 as bun.devcontainer
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG BUN_PACKAGES_DIR
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
ENV PATH "$ZIG_PATH:$PATH"
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
ENV LIB_ICU_PATH ${WEBKIT_OUT_DIR}/lib
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
ENV PATH "${BUN_PACKAGES_DIR}/bun-linux-x64:${BUN_PACKAGES_DIR}/bun-linux-aarch64:${BUN_PACKAGES_DIR}/debug-bun-linux-x64:${BUN_PACKAGES_DIR}/debug-bun-linux-aarch64:$PATH"
ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
ENV BUN_INSTALL /home/ubuntu/.bun
ENV XDG_CONFIG_HOME /home/ubuntu/.config
WORKDIR ${GITHUB_WORKSPACE}
RUN apt-get update && \
apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common && \
add-apt-repository ppa:longsleep/golang-backports && \
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
chmod +x llvm.sh && \
./llvm.sh 15 && \
curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
apt-get update && \
apt-get install --no-install-recommends -y \
ca-certificates \
curl \
gnupg2 \
software-properties-common \
cmake \
build-essential \
git \
libssl-dev \
ruby \
liblld-15-dev \
libclang-15-dev \
nodejs \
gcc \
g++ \
clang-15 \
clang-format-15 \
libc++-15-dev \
libc++abi-15-dev \
lld-15 \
libicu-dev \
wget \
rustc \
cargo \
unzip \
tar \
golang-go ninja-build pkg-config automake autoconf libtool curl && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-15 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-15 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-15 90 && \
npm install -g esbuild
ENV CC=clang-15
ENV CXX=clang++-15
ENV ZIG "${ZIG_PATH}/zig"
WORKDIR $GITHUB_WORKSPACE
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
WORKDIR $GITHUB_WORKSPACE
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ARG ZIG_URL
ADD $ZIG_URL .
RUN tar -xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-15 90
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/limits.conf /etc/security/limits.conf
COPY ".devcontainer/scripts/" /scripts/
COPY ".devcontainer/scripts/getting-started.sh" $GITHUB_WORKSPACE/getting-started.sh
COPY ".devcontainer/README.md" $GITHUB_WORKSPACE/README.md
ENV JSC_BASE_DIR=$WEBKIT_DIR
ENV WEBKIT_RELEASE_DIR=$WEBKIT_DIR
ENV WEBKIT_DEBUG_DIR=$WEBKIT_DIR
ENV WEBKIT_RELEASE_DIR_LTO=$WEBKIT_DIR
RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
bash /scripts/common-debian.sh && \
bash /scripts/github.sh && \
bash /scripts/nice.sh && \
bash /scripts/zig-env.sh
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json

517
Makefile

File diff suppressed because it is too large

5264
README.md

File diff suppressed because it is too large

Binary file not shown.

View File

@@ -1,171 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
esbuild

View File

@@ -1,40 +0,0 @@
# Bundler benchmark
This is a performance benchmark of the following bundlers:
- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack
It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), aside from the fact that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/) library, with minification and source maps enabled.
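For reference, the kind of build each bundler is asked to perform looks roughly like the sketch below, expressed through esbuild's JS API; the entry point and output path are illustrative placeholders, not taken from the benchmark itself, which drives every tool from `run-bench.sh`.
```js
// Hypothetical sketch of the workload: bundle an entry that pulls in
// 10 copies of three.js, with minification and source maps enabled.
import { build } from "esbuild";

await build({
  entryPoints: ["entry.js"], // placeholder entry that imports three.js 10 times
  bundle: true,
  minify: true,
  sourcemap: true,
  outfile: "out/bundle.js", // placeholder output path
});
```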
To run the benchmark:
```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```
Various output will be written to the console by each bundler. Scan through the results for lines that look like this underneath each bundler's output:
```sh
real <number>
user <number>
sys <number>
```
These lines are generated by the `time` command which is used to benchmark each build.
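If you'd rather collect those timings than scan for them by eye, a minimal helper sketch is shown below; the script name `collect-times.mjs` is hypothetical and not part of the benchmark. Pipe the benchmark output into it, e.g. `./run-bench.sh 2>&1 | node collect-times.mjs`.
```js
// collect-times.mjs (hypothetical) - print every real/user/sys line seen on stdin.
import readline from "node:readline";

const rl = readline.createInterface({ input: process.stdin });
const times = [];

rl.on("line", line => {
  const match = line.trim().match(/^(real|user|sys)\s+(.+)$/);
  if (match) times.push(`${match[1]} ${match[2]}`);
});

rl.on("close", () => {
  for (const entry of times) console.log(entry);
});
```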
## Results
The `real` results, as run on a 16-inch M1 MacBook Pro:
| Bundler | Time |
| ------- | ------ |
| Bun | 0.17s |
| esbuild | 0.33s |
| Rollup | 18.82s |
| Webpack | 26.21s |
| Parcel | 17.95s |

Binary file not shown.

View File

@@ -1,8 +0,0 @@
{
"name": "bundle",
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.5.0"
}
}

View File

@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "bundler",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -6,4 +6,7 @@ const arg = process.argv.slice(1);
// TODO: remove Buffer.from() when readFileSync() returns Buffer
for (let i = 0; i < count; i++) console.log(arg.map(file => Buffer.from(readFileSync(file, "utf8"))).join(""));
for (let i = 0; i < count; i++)
console.log(
arg.map((file) => Buffer.from(readFileSync(file, "utf8"))).join("")
);

View File

@@ -7,8 +7,14 @@ function runner(ready) {
for (let i = 0; i < size; i++) {
rand[i] = (Math.random() * 1024 * 1024) | 0;
}
const dest = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
const src = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
const dest = `/tmp/fs-test-copy-file-${(
(Math.random() * 10000000 + 100) |
0
).toString(32)}`;
const src = `/tmp/fs-test-copy-file-${(
(Math.random() * 10000000 + 100) |
0
).toString(32)}`;
writeFileSync(src, Buffer.from(rand.buffer), { encoding: "buffer" });
const { size: fileSize } = statSync(src);
if (fileSize !== rand.byteLength) {
@@ -29,6 +35,6 @@ runner((src, dest, rand) =>
// );
// }
// }
}),
})
);
await run();

View File

@@ -1,31 +0,0 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";
export const implementations = [
{
EventEmitter: EventEmitterNative,
name: process.isBun ? (EventEmitterNative.init ? "bun" : "C++") : "node:events",
monkey: true,
},
// { EventEmitter: EventEmitter3, name: "EventEmitter3" },
].filter(Boolean);
for (const impl of implementations) {
impl.EventEmitter?.setMaxListeners?.(Infinity);
}
export function groupForEmitter(name, cb) {
if (implementations.length === 1) {
return cb({
...implementations[0],
name: `${name}: ${implementations[0].name}`,
});
} else {
return group(name, () => {
for (let impl of implementations) {
cb(impl);
}
});
}
}

View File

@@ -1,96 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("single emit", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("on x 10_000 (handler)", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
if (!called) throw new Error("not called");
});
});
// for (let { impl: EventEmitter, name, monkey } of []) {
// if (monkey) {
// var monkeyEmitter = Object.assign({}, EventEmitter.prototype);
// monkeyEmitter.on("hello", event => {
// event.preventDefault();
// });
// bench(`[monkey] ${className}.emit`, () => {
// var called = false;
// monkeyEmitter.emit("hello", {
// preventDefault() {
// id++;
// called = true;
// },
// });
// if (!called) {
// throw new Error("monkey failed");
// }
// });
// bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
// var cb = () => {
// event.preventDefault();
// };
// monkeyEmitter.on("hey", cb);
// for (let i = 0; i < 10_000; i++)
// monkey.emit("hey", {
// preventDefault() {
// id++;
// },
// });
// monkeyEmitter.off("hey", cb);
// });
// }
// }
// var target = new EventTarget();
// target.addEventListener("hello", event => {});
// bench("EventTarget.dispatch", () => {
// target.dispatchEvent(event);
// });
// var hey = new Event("hey");
// bench("EventTarget.on x 10_000 (handler)", () => {
// var handler = event => {};
// target.addEventListener("hey", handler);
// for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
// target.removeEventListener("hey", handler);
// });
await run();

View File

@@ -1,40 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("test 1", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("test 2", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
await run();

View File

@@ -1,63 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445
let rngState = 123456789;
function nextInt() {
const m = 0x80000000; // 2**31;
const a = 1103515245;
const c = 12345;
rngState = (a * rngState + c) % m;
return rngState;
}
function nextRange(start, end) {
// returns in range [start, end): including start, excluding end
// can't just take nextInt() modulo the range because of weak randomness in the lower bits
const rangeSize = end - start;
const randomUnder1 = nextInt() / 0x7fffffff; // 2**31 - 1
return start + Math.floor(randomUnder1 * rangeSize);
}
const chunks = new Array(1024).fill(null).map((_, j) => {
const arr = new Uint8Array(1024);
for (let i = 0; i < arr.length; i++) {
arr[i] = nextRange(0, 256);
}
return arr;
});
groupForEmitter("stream simulation", ({ EventEmitter, name }) => {
bench(name, () => {
let id = 0;
const stream = new EventEmitter();
stream.on("start", res => {
if (res.status !== 200) throw new Error("not 200");
});
const received = [];
stream.on("data", req => {
received.push(req);
});
stream.on("end", ev => {
ev.preventDefault();
});
// simulate a stream
stream.emit("start", { status: 200 });
for (let chunk of chunks) {
stream.emit("data", chunk);
}
stream.emit("end", {
preventDefault() {
id++;
},
});
if (id !== 1) throw new Error("not implemented right");
if (received.length !== 1024) throw new Error("not implemented right");
});
});
await run();

Binary file not shown.

View File

@@ -1 +1 @@
console.log("Hello via Bun!");
console.log("Hello via Bun!");

View File

@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text(),
);
}
await Promise.all(requests);

View File

@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text(),
);
}
await Promise.all(requests);

View File

@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text()
);
}
await Promise.all(requests);

View File

@@ -1,7 +1,8 @@
import { ptr, dlopen, CString, toBuffer } from "bun:ffi";
import { run, bench, group } from "mitata";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");
const { napiNoop, napiHash, napiString } = require(import.meta.dir +
"/src/ffi_napi_bench.node");
const {
symbols: {

View File

@@ -1,7 +1,10 @@
import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;
const path = new URL(
"src/target/release/libffi_napi_bench." + extension,
import.meta.url,
).pathname;
const {
symbols: { ffi_noop, ffi_hash, ffi_string },

Binary file not shown.

View File

@@ -29,7 +29,7 @@ loop:
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cd src; zig run -Drelease-fast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cp src/colors.css.blob $(PROJECT)/colors.css.blob
loop-emotion:
@@ -40,7 +40,7 @@ loop-emotion:
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cd src; zig run -Drelease-fast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cp src/css-in-js-styles.tsx.blob $(PROJECT)/css-in-js-styles.blob
process_video:

View File

@@ -24,7 +24,7 @@ if (process.env.PROJECT === "bun") {
// bunProcess.stderr.pipe(process.stderr);
// bunProcess.stdout.pipe(process.stdout);
bunProcess.once("error", err => {
bunProcess.once("error", (err) => {
console.error("❌ bun error", err);
process.exit(1);
});
@@ -32,15 +32,19 @@ if (process.env.PROJECT === "bun") {
bunProcess?.kill(0);
});
} else if (process.env.PROJECT === "next") {
const bunProcess = child_process.spawn("./node_modules/.bin/next", ["--port", "8080"], {
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
const bunProcess = child_process.spawn(
"./node_modules/.bin/next",
["--port", "8080"],
{
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
shell: false,
});
shell: false,
},
);
}
const delay = new Promise((resolve, reject) => {
@@ -107,7 +111,7 @@ async function main() {
return runPage();
}
main().catch(error =>
main().catch((error) =>
setTimeout(() => {
throw error;
}),

View File

@@ -4,7 +4,9 @@ const path = require("path");
const PROJECT = process.env.PROJECT || "bun";
const percentile = require("percentile");
const PACKAGE_NAME = process.env.PACKAGE_NAME;
const label = `${PACKAGE_NAME}@${require(PACKAGE_NAME + "/package.json").version}`;
const label = `${PACKAGE_NAME}@${
require(PACKAGE_NAME + "/package.json").version
}`;
const BASEFOLDER = path.resolve(PROJECT);
const OUTFILE = path.join(process.cwd(), process.env.OUTFILE);
@@ -18,10 +20,10 @@ const TOTAL_FRAMES = VALID_TIMES.length;
const timings = fs
.readFileSync(BASEFOLDER + "/frames.all.clean", "utf8")
.split("\n")
.map(a => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter(a => parseInt(a, 10))
.filter(a => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map(num => BigInt(num));
.map((a) => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter((a) => parseInt(a, 10))
.filter((a) => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map((num) => BigInt(num));
timings.sort();
@@ -45,7 +47,7 @@ const report = {
name: PACKAGE_NAME,
version: require(PACKAGE_NAME + "/package.json").version,
},
timestamps: timings.map(a => Number(a)),
timestamps: timings.map((a) => Number(a)),
frameTimes: frameTime,
percentileMs: {
50: percentile(50, frameTime) / 10,
@@ -65,7 +67,9 @@ fs.writeFileSync(
"." +
process.env.SLEEP_INTERVAL +
"ms." +
`${process.platform}-${process.arch === "arm64" ? "aarch64" : process.arch}` +
`${process.platform}-${
process.arch === "arm64" ? "aarch64" : process.arch
}` +
".json",
),
JSON.stringify(report, null, 2),
@@ -95,5 +99,9 @@ console.log(
timings.length,
"/",
TOTAL_FRAMES,
"(" + Math.round(Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100) + "%)",
"(" +
Math.round(
Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100,
) +
"%)",
);

View File

@@ -3,7 +3,10 @@ import classNames from "classnames";
import ReactDOM from "react-dom";
const Base = ({}) => {
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;
const name =
typeof location !== "undefined"
? decodeURIComponent(location.search.substring(1))
: null;
return <Main productName={name} />;
};

View File

@@ -4,8 +4,8 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<header>
<div className="Title">CSS HMR Stress Test!</div>
<p className="Description">
This page visually tests how quickly a bundler can update {props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot
Module Reloading.
This page visually tests how quickly a bundler can update{" "}
{props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot Module Reloading.
</p>
</header>
<main className="main">
@@ -19,7 +19,9 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="ProgressBar-container">
<div className="ProgressBar"></div>
</div>
<div className="SectionLabel">The progress bar should move from left to right smoothly.</div>
<div className="SectionLabel">
The progress bar should move from left to right smoothly.
</div>
</section>
<section>
@@ -40,15 +42,21 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="Spinner"></div>
</div>
</div>
<div className="SectionLabel">The spinners should rotate &amp; change color smoothly.</div>
<div className="SectionLabel">
The spinners should rotate &amp; change color smoothly.
</div>
</section>
</main>
<footer>
<div className="SectionLabel FooterLabel">There are no CSS animations on this page.</div>
<div className="SectionLabel FooterLabel">
There are no CSS animations on this page.
</div>
<div className="Bundler-container">
<div className="Bundler">{props.productName}</div>
<div className="Bundler-updateRate">{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}</div>
<div className="Bundler-updateRate">
{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}
</div>
</div>
</footer>
</>

View File

@@ -1,4 +0,0 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
extends: ["@remix-run/eslint-config", "@remix-run/eslint-config/node"],
};

View File

@@ -1,10 +0,0 @@
node_modules
/.cache
/build
/public/build
.env
package-lock.json
yarn.lock
pnpm-lock.yaml
bun.lockb

View File

@@ -1,18 +0,0 @@
# `install` benchmark
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine)
```
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```
To check that the app is working as expected:
```
$ bun run dev
$ npm run dev
$ yarn dev
$ pnpm dev
```
Then visit [http://localhost:3000](http://localhost:3000).
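If you want the timings in machine-readable form, `hyperfine` can also export JSON via its `--export-json` flag; the sketch below reads such a file and prints the mean wall-clock time per command (the `results.json` filename is illustrative).
```js
// Hypothetical post-processing of `hyperfine ... --export-json results.json`.
import { readFileSync } from "node:fs";

const { results } = JSON.parse(readFileSync("results.json", "utf8"));
for (const { command, mean } of results) {
  // hyperfine reports times in seconds
  console.log(`${command}: ${mean.toFixed(2)}s`);
}
```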

View File

@@ -1,18 +0,0 @@
/**
* By default, Remix will handle hydrating your app on the client for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.client
*/
import { RemixBrowser } from "@remix-run/react";
import { startTransition, StrictMode } from "react";
import { hydrateRoot } from "react-dom/client";
startTransition(() => {
hydrateRoot(
document,
<StrictMode>
<RemixBrowser />
</StrictMode>,
);
});

View File

@@ -1,101 +0,0 @@
/**
* By default, Remix will handle generating the HTTP Response for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
*/
import { PassThrough } from "node:stream";
import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { renderToPipeableStream } from "react-dom/server";
const ABORT_DELAY = 5_000;
export default function handleRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return isbot(request.headers.get("user-agent"))
? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
: handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
}
function handleBotRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onAllReady() {
const body = new PassThrough();
responseHeaders.set("Content-Type", "text/html");
resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);
pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
responseStatusCode = 500;
console.error(error);
},
},
);
setTimeout(abort, ABORT_DELAY);
});
}
function handleBrowserRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onShellReady() {
const body = new PassThrough();
responseHeaders.set("Content-Type", "text/html");
resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);
pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
console.error(error);
responseStatusCode = 500;
},
},
);
setTimeout(abort, ABORT_DELAY);
});
}

View File

@@ -1,20 +0,0 @@
import { Links, LiveReload, Meta, Outlet, Scripts, ScrollRestoration } from "@remix-run/react";
export default function App() {
return (
<html lang="en">
<head>
<meta charSet="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<Meta />
<Links />
</head>
<body>
<Outlet />
<ScrollRestoration />
<Scripts />
<LiveReload />
</body>
</html>
);
}

View File

@@ -1,30 +0,0 @@
import type { V2_MetaFunction } from "@remix-run/node";
export const meta: V2_MetaFunction = () => {
return [{ title: "New Remix App" }];
};
export default function Index() {
return (
<div style={{ fontFamily: "system-ui, sans-serif", lineHeight: "1.4" }}>
<h1>Welcome to Remix</h1>
<ul>
<li>
<a target="_blank" href="https://remix.run/tutorials/blog" rel="noreferrer">
15m Quickstart Blog Tutorial
</a>
</li>
<li>
<a target="_blank" href="https://remix.run/tutorials/jokes" rel="noreferrer">
Deep Dive Jokes App Tutorial
</a>
</li>
<li>
<a target="_blank" href="https://remix.run/docs" rel="noreferrer">
Remix Docs
</a>
</li>
</ul>
</div>
);
}

View File

@@ -1,31 +0,0 @@
{
"private": true,
"sideEffects": false,
"scripts": {
"build": "remix build",
"dev": "remix dev",
"start": "remix-serve build",
"typecheck": "tsc",
"clean": "rm -rf node_modules",
"bench": "hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'"
},
"dependencies": {
"@remix-run/node": "^1.15.0",
"@remix-run/react": "^1.15.0",
"@remix-run/serve": "^1.15.0",
"isbot": "^3.6.5",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@remix-run/dev": "^1.15.0",
"@remix-run/eslint-config": "^1.15.0",
"@types/react": "^18.0.25",
"@types/react-dom": "^18.0.8",
"eslint": "^8.27.0",
"typescript": "^4.8.4"
},
"engines": {
"node": ">=14"
}
}

Binary file not shown.


View File

@@ -1,14 +0,0 @@
/** @type {import('@remix-run/dev').AppConfig} */
module.exports = {
ignoredRouteFiles: ["**/.*"],
// appDirectory: "app",
// assetsBuildDirectory: "public/build",
// serverBuildPath: "build/index.js",
// publicPath: "/build/",
future: {
v2_errorBoundary: true,
v2_meta: true,
v2_normalizeFormMethod: true,
v2_routeConvention: true,
},
};

View File

@@ -1,2 +0,0 @@
/// <reference types="@remix-run/dev" />
/// <reference types="@remix-run/node" />

View File

@@ -1,22 +0,0 @@
{
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ES2019"],
"isolatedModules": true,
"esModuleInterop": true,
"jsx": "react-jsx",
"moduleResolution": "node",
"resolveJsonModule": true,
"target": "ES2019",
"strict": true,
"allowJs": true,
"forceConsistentCasingInFileNames": true,
"baseUrl": ".",
"paths": {
"~/*": ["./app/*"]
},
// Remix takes care of building everything in `remix build`.
"noEmit": true
}
}

View File

@@ -1,8 +1,12 @@
import { bench, run } from "mitata";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));
bench("JSON.stringify({hello: 'world'})", () =>
JSON.stringify({ hello: "world" }),
);
const otherUint8Array = new Uint8Array(1024);
bench("Uint8Array.from(otherUint8Array)", () => Uint8Array.from(otherUint8Array));
bench("Uint8Array.from(otherUint8Array)", () =>
Uint8Array.from(otherUint8Array),
);
run();

View File

@@ -1,5 +1,7 @@
import { bench, run } from "mitata";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
bench("console.log({ hello: 'object' })", () =>
console.log({ hello: "object" }),
);
await run();

View File

@@ -47,7 +47,11 @@ fs.writeFileSync(
output + `/file${count}.mjs`,
`
export const THE_END = true;
${saveStack ? `globalThis.evaluationOrder.push("${output}/file${count}.mjs");` : ""}
${
saveStack
? `globalThis.evaluationOrder.push("${output}/file${count}.mjs");`
: ""
}
`,
"utf8",
);
@@ -56,7 +60,11 @@ fs.writeFileSync(
output + `/file${count}.js`,
`
module.exports.THE_END = true;
${saveStack ? `globalThis.evaluationOrder.push("${output}/file${count}.js");` : ""}
${
saveStack
? `globalThis.evaluationOrder.push("${output}/file${count}.js");`
: ""
}
`,
"utf8",
);

View File

@@ -1,24 +1,5 @@
import { bench, run } from "mitata";
import {
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
homedir,
hostname,
loadavg,
platform,
release,
setPriority,
tmpdir,
type,
userInfo,
version,
} from "node:os";
import { cpus, endianness, arch, uptime, networkInterfaces, getPriority, totalmem, freemem, homedir, hostname, loadavg, platform, release, setPriority, tmpdir, type, userInfo, version } from "node:os";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());

View File

@@ -1,24 +1,5 @@
import { bench, run } from "mitata";
import {
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
homedir,
hostname,
loadavg,
platform,
release,
setPriority,
tmpdir,
type,
userInfo,
version,
} from "node:os";
import { cpus, endianness, arch, uptime, networkInterfaces, getPriority, totalmem, freemem, homedir, hostname, loadavg, platform, release, setPriority, tmpdir, type, userInfo, version } from "node:os";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());

1986
bench/package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@@ -1,13 +1,11 @@
{
"name": "bench",
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"mitata": "^0.1.6",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"mitata": "^0.1.6"
"@swc/core": "^1.2.133",
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7"
},
"scripts": {
"ffi": "cd ffi && bun run deps && bun run build && bun run bench",
@@ -19,6 +17,5 @@
},
"devDependencies": {
"fast-deep-equal": "^3.1.3"
},
"prettier": "../.prettierrc.cjs"
}
}

Binary file not shown.

View File

@@ -1,16 +0,0 @@
{
"name": "react-hello-world",
"version": "1.0.0",
"description": "",
"main": "react-hello-world.node.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Colin McDonnell",
"license": "ISC",
"dependencies": {
"react": "next",
"react-dom": "next"
}
}

View File

@@ -18,8 +18,8 @@ const headers = {
};
Deno.serve(
async req => {
async (req) => {
return new Response(await renderToReadableStream(<App />), headers);
},
{ port: 8080 },
{ port: 8080 }
);

View File

@@ -1,9 +1,10 @@
// to run this:
// NODE_ENV=production bun react-hello-world.jsx
// bun --jsx-production react-hello-world.jsx
// Make sure you're using react-dom@18.3.0 or later.
// Currently that is available at react-dom@next (which is installed in this repository)
import { renderToReadableStream } from "react-dom/server";
// This will become the official react-dom/server.bun build a little later
// It will be the default when you import from "react-dom/server"
// That will work via the "bun" package.json export condition (which bun already supports)
import { renderToReadableStream } from "../../test/bun.js/react-dom-server.bun";
const headers = {
headers: {
"Content-Type": "text/html",

File diff suppressed because one or more lines are too long

View File

@@ -24,7 +24,9 @@ http
onShellError(error) {
// Something errored before we could complete the shell so we emit an alternative shell.
res.statusCode = 500;
res.send('<!doctype html><p>Loading...</p><script src="clientrender.js"></script>');
res.send(
'<!doctype html><p>Loading...</p><script src="clientrender.js"></script>'
);
},
onAllReady() {
// If you don't want streaming, use this instead of onShellReady.

Binary file not shown.

View File

@@ -8,6 +8,8 @@ const transpiler = new Bun.Transpiler({
console.time("Get exports");
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
for (let i = 0; i < ITERATIONS; i++) {
const imports = transpiler.scanImports(readFileSync("remix-route.ts", "utf8"));
const imports = transpiler.scanImports(
readFileSync("remix-route.ts", "utf8")
);
}
console.timeEnd("Get exports");

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,7 +1,7 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const identity = x => x;
const identity = (x) => x;
for (let i = 0; i < 20; i++) {
var array = new Array(i);
@@ -22,15 +22,35 @@ bench("inline Array.map x 6", () => [1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 7", () => [1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 8", () => [1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 9", () => [1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 10", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 11", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 12", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 13", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 14", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 15", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 16", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 17", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 18", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 19", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 10", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 11", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 12", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 13", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 14", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 15", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 16", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 17", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 18", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 19", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
await run();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

Some files were not shown because too many files have changed in this diff