Compare commits

..

4 Commits

Author            SHA1        Message                                 Date
Colin McDonnell   12cbb28cbf  Back to any                             2023-03-29 21:31:00 -07:00
Colin McDonnell   3a0894e305  Fix tests                               2023-03-29 21:30:03 -07:00
Colin McDonnell   2939316f9c  Fix type tests                          2023-03-29 21:25:39 -07:00
Colin McDonnell   73d96cc7d9  ReadableStream defaults to Uint8array   2023-03-29 21:21:38 -07:00
1097 changed files with 103057 additions and 199333 deletions

.devcontainer/README.md

@@ -0,0 +1,28 @@
# Bun's Dev Container
To get started, log in to GitHub and clone bun's GitHub repo into `/build/bun`.
# First time setup
```bash
gh auth login # if it fails to open a browser, use Personal Access Token instead
gh repo clone oven-sh/bun . -- --depth=1 --progress -j8
```
# Compile bun dependencies (zig is already compiled)
```bash
make devcontainer
```
# Build bun for development
```bash
make dev
```
# Run bun
```bash
bun-debug help
```


@@ -0,0 +1,70 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
{
"name": "bun (Ubuntu)",
// Sets the run context to one level up instead of the .devcontainer folder.
"context": "..",
"hostRequirements": { "memory": "16gb" },
// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
"dockerFile": "../Dockerfile.devcontainer",
// Set *default* container specific settings.json values on container create.
"settings": {
"terminal.integrated.shell.linux": "/bin/zsh",
"zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
"zig.zigPath": "/build/zig/zig",
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"AugusteRame.zls-vscode",
"ms-vscode.cpptools",
"/home/ubuntu/vscode-zig.vsix",
"vadimcn.vscode-lldb",
"esbenp.prettier-vscode",
"xaver.clang-format"
],
"postCreateCommand": "cd /build/bun; bash /build/getting-started.sh; cat /build/README.md",
"build": {
"target": "bun.devcontainer",
"cacheFrom": ["ghcr.io/oven-sh/bun.devcontainer:latest"],
"args": {
"BUILDARCH": "${localEnv:DOCKER_BUILDARCH:amd64}",
"--platform": "linux/${localEnv:DOCKER_BUILDARCH:amd64}",
"--tag": "ghcr.io/oven-sh/bun.devcontainer:latest"
}
},
"runArgs": [
"--ulimit",
"memlock=-1:-1",
"--ulimit",
"nofile=65536:65536",
"--cap-add=SYS_PTRACE",
"--security-opt",
"seccomp=unconfined"
],
"workspaceMount": "source=bun,target=/build/bun,type=volume",
"workspaceFolder": "/build/bun",
"mounts": [
"source=bun-install,target=/home/ubuntu/.bun,type=volume",
"source=bun-config,target=/home/ubuntu/.config,type=volume"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [3000, 8081, 8080]
// Uncomment the next line to run commands after the container is created - for example installing curl.
// "postCreateCommand": "apt-get update && apt-get install -y curl",
// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
// "remoteUser": "vscode"
}

.devcontainer/limits.conf

@@ -0,0 +1,61 @@
# /etc/security/limits.conf
#
#Each line describes a limit for a user in the form:
#
#<domain> <type> <item> <value>
#
#Where:
#<domain> can be:
# - a user name
# - a group name, with @group syntax
# - the wildcard *, for default entry
# - the wildcard %, can be also used with %group syntax,
# for maxlogin limit
# - NOTE: group and wildcard limits are not applied to root.
# To apply a limit to the root user, <domain> must be
# the literal username root.
#
#<type> can have the two values:
# - "soft" for enforcing the soft limits
# - "hard" for enforcing hard limits
#
#<item> can be one of the following:
# - core - limits the core file size (KB)
# - data - max data size (KB)
# - fsize - maximum filesize (KB)
# - memlock - max locked-in-memory address space (KB)
# - nofile - max number of open file descriptors
# - rss - max resident set size (KB)
# - stack - max stack size (KB)
# - cpu - max CPU time (MIN)
# - nproc - max number of processes
# - as - address space limit (KB)
# - maxlogins - max number of logins for this user
# - maxsyslogins - max number of logins on the system
# - priority - the priority to run user process with
# - locks - max number of file locks the user can hold
# - sigpending - max number of pending signals
# - msgqueue - max memory used by POSIX message queues (bytes)
# - nice - max nice priority allowed to raise to values: [-20, 19]
# - rtprio - max realtime priority
# - chroot - change root to directory (Debian-specific)
#
#<domain> <type> <item> <value>
#
* soft memlock 33554432
* hard memlock 33554432
* soft nofile 33554432
* hard nofile 33554432
#* soft core 0
#root hard core 100000
#* hard rss 10000
#@student hard nproc 20
#@faculty soft nproc 20
#@faculty hard nproc 50
#ftp hard nproc 0
#ftp - chroot /ftp
#@student - maxlogins 4
# End of file


@@ -0,0 +1,454 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
set -e
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
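# The parameter expansion below swaps the login shell's stock PATH (queried via `sh -lc`) for the
# literal string "$PATH", so a login shell that sources this file keeps any entries added with ENV
# instead of having them clobbered by the default profile.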
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
USERNAME=${CURRENT_USER}
break
fi
done
if [ "${USERNAME}" = "" ]; then
USERNAME=vscode
fi
elif [ "${USERNAME}" = "none" ]; then
USERNAME=root
USER_UID=0
USER_GID=0
fi
# Load markers to see which steps have already run
if [ -f "${MARKER_FILE}" ]; then
echo "Marker file found:"
cat "${MARKER_FILE}"
source "${MARKER_FILE}"
fi
# Ensure apt is in non-interactive mode to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Function to call apt-get if needed
apt_get_update_if_needed()
{
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Install apt-utils first to avoid a debconf warning, then verify that other common developer tools and dependencies are present
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
package_list="apt-utils \
openssh-client \
gnupg2 \
dirmngr \
iproute2 \
procps \
lsof \
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim-tiny \
less \
jq \
lsb-release \
apt-transport-https \
dialog \
libc6 \
libgcc1 \
libkrb5-3 \
libgssapi-krb5-2 \
libicu[0-9][0-9] \
liblttng-ust[0-9] \
libstdc++6 \
zlib1g \
locales \
sudo \
ncdu \
man-db \
strace \
manpages \
manpages-dev \
init-system-helpers"
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
# Bring in variables from /etc/os-release like VERSION_CODENAME
. /etc/os-release
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
echo "Running apt-get update..."
apt-get update
package_list="${package_list} manpages-posix manpages-posix-dev"
else
apt_get_update_if_needed
fi
# Install libssl1.1 if available
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
package_list="${package_list} libssl1.1"
fi
# Install appropriate version of libssl1.0.x if available
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
# Debian 9
package_list="${package_list} libssl1.0.2"
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
# Ubuntu 18.04, 16.04, earlier
package_list="${package_list} libssl1.0.0"
fi
fi
echo "Packages to verify are installed: ${package_list}"
apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
# Install git if not already installed (may be more recent than distro version)
if ! type git > /dev/null 2>&1; then
apt-get -y install --no-install-recommends git
fi
PACKAGES_ALREADY_INSTALLED="true"
fi
# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
apt_get_update_if_needed
apt-get -y upgrade --no-install-recommends
apt-get autoremove -y
fi
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen
LOCALE_ALREADY_SET="true"
fi
# Create or update a non-root user to match UID/GID.
group_name="${USERNAME}"
if id -u ${USERNAME} > /dev/null 2>&1; then
# User exists, update if needed
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
group_name="$(id -gn $USERNAME)"
groupmod --gid $USER_GID ${group_name}
usermod --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
usermod --uid $USER_UID $USERNAME
fi
else
# Create user
if [ "${USER_GID}" = "automatic" ]; then
groupadd $USERNAME
else
groupadd --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" = "automatic" ]; then
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
else
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
fi
fi
# Add sudo support for non-root user
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
chmod 0440 /etc/sudoers.d/$USERNAME
EXISTING_NON_ROOT_USER="${USERNAME}"
fi
# ** Shell customization section **
if [ "${USERNAME}" = "root" ]; then
user_rc_path="/root"
else
user_rc_path="/home/${USERNAME}"
fi
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
fi
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
cp /etc/skel/.profile "${user_rc_path}/.profile"
fi
# .bashrc/.zshrc snippet
rc_snippet="$(cat << 'EOF'
if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
fi
mkdir -p "$HOME/.config/vscode-dev-containers"
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi
# Set the default git editor if not already set
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
if [ "${TERM_PROGRAM}" = "vscode" ]; then
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
export GIT_EDITOR="code-insiders --wait"
else
export GIT_EDITOR="code --wait"
fi
fi
fi
EOF
)"
# code shim; it falls back to code-insiders if code is not available
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh
get_in_path_except_current() {
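# List every "$1" on PATH, drop this shim itself, and keep the entry that follows it (the real binary)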
which -a "$1" | grep -A1 "$0" | grep -v "$0"
}
code="$(get_in_path_except_current code)"
if [ -n "$code" ]; then
exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
exec code-insiders "$@"
else
echo "code or code-insiders is not installed" >&2
exit 127
fi
EOF
chmod +x /usr/local/bin/code
# systemctl shim - tells people to use 'service' if systemd is not running
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
exec /bin/systemctl "$@"
else
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
codespaces_bash="$(cat \
<<'EOF'
# Codespaces bash prompt theme
__bash_prompt() {
local userpart='`export XIT=$? \
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
local gitbranch='`\
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
if [ "${BRANCH}" != "" ]; then \
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
echo -n " \[\033[1;33m\]✗"; \
fi \
&& echo -n "\[\033[0;36m\]) "; \
fi; \
fi`'
local lightblue='\[\033[1;34m\]'
local removecolor='\[\033[0m\]'
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
unset -f __bash_prompt
}
__bash_prompt
EOF
)"
codespaces_zsh="$(cat \
<<'EOF'
# Codespaces zsh prompt theme
__zsh_prompt() {
local prompt_username
if [ ! -z "${GITHUB_USER}" ]; then
prompt_username="@${GITHUB_USER}"
else
prompt_username="%n"
fi
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt
EOF
)"
# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/bash.bashrc
echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
if [ "${USERNAME}" != "root" ]; then
echo "${codespaces_bash}" >> "/root/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
fi
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
RC_SNIPPET_ALREADY_ADDED="true"
fi
# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
if ! type zsh > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get install -y zsh
fi
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/zsh/zshrc
ZSH_ALREADY_INSTALLED="true"
fi
# Adapted, simplified inline Oh My Zsh! install steps that add and default to a codespaces theme.
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
user_rc_file="${user_rc_path}/.zshrc"
umask g-w,o-w
mkdir -p ${oh_my_install_dir}
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
mkdir -p ${oh_my_install_dir}/custom/themes
echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
# Shrink git while still enabling updates
cd "${oh_my_install_dir}"
git repack -a -d -f --depth=1 --window=1
# Copy to non-root user if one is specified
if [ "${USERNAME}" != "root" ]; then
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
fi
fi
fi
# Persist image metadata info and install an info script if meta.env is found in the same directory
meta_info_script="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env
# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
echo "${VERSION}"
exit 0
elif [ "$1" = "release" ]; then
echo "${GIT_REPOSITORY_RELEASE}"
exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
echo "${CONTENTS_URL}"
exit 0
fi
#Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
mkdir -p /usr/local/etc/vscode-dev-containers/
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
chmod +x /usr/local/bin/devcontainer-info
fi
# Write marker file
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
echo "Done!"


@@ -0,0 +1,16 @@
#!/bin/bash
echo "To get started, login to GitHub and clone bun's GitHub repo into /build/bun"
echo "If it fails to open a browser, login with a Personal Access Token instead"
echo "# First time setup"
echo "gh auth login"
echo "gh repo clone oven-sh/bun . -- --depth=1 --progress -j8"
echo ""
echo "# Compile bun dependencies (zig is already compiled)"
echo "make devcontainer"
echo ""
echo "# Build bun for development"
echo "make dev"
echo ""
echo "# Run bun"
echo "bun-debug"


@@ -0,0 +1,207 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./github-debian.sh [version]
CLI_VERSION=${1:-"latest"}
GITHUB_CLI_ARCHIVE_GPG_KEY=C99B11DEB97541F0
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com:80
keyserver hkps://keys.openpgp.org
keyserver hkp://keyserver.pgp.com"
set -e
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Get central common setting
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" -o /tmp/vsdc-settings.env 2>/dev/null || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
# Import the GPG key whose ID is stored in the variable named by $1
receive_gpg_keys() {
get_common_setting $1
local keys=${!1}
get_common_setting GPG_KEY_SERVERS true
# Use a temporary location for gpg keys to avoid polluting the image
export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME}
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
# GPG key download sometimes fails for some reason and retrying fixes it.
local retry_count=0
local gpg_ok="false"
set +e
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
echo "(*) Downloading GPG key..."
(echo "${keys}" | xargs -n 1 gpg --recv-keys) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retring in 10s..."
((retry_count++))
sleep 10s
fi
done
set -e
if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key."
exit 1
fi
}
# Figure out the correct version if a full three-part version number was not passed
find_version_from_git_tags() {
local variable_name=$1
local requested_version=${!variable_name}
if [ "${requested_version}" = "none" ]; then return; fi
local repository=$2
local prefix=${3:-"tags/v"}
local separator=${4:-"."}
local last_part_optional=${5:-"false"}
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
local escaped_separator=${separator//./\\.}
local last_part
if [ "${last_part_optional}" = "true" ]; then
last_part="(${escaped_separator}[0-9]+)?"
else
last_part="${escaped_separator}[0-9]+"
fi
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
else
set +e
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
set -e
fi
fi
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" >/dev/null 2>&1; then
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
exit 1
fi
echo "${variable_name}=${!variable_name}"
}
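# Illustrative usage (hypothetical values): with CLI_VERSION=2.3, calling
#   find_version_from_git_tags CLI_VERSION https://github.com/cli/cli
# resolves CLI_VERSION to the newest 2.3.x tag published in that repository.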
# Import the GPG key whose ID is stored in the variable named by $1
receive_gpg_keys() {
get_common_setting $1
local keys=${!1}
get_common_setting GPG_KEY_SERVERS true
local keyring_args=""
if [ ! -z "$2" ]; then
keyring_args="--no-default-keyring --keyring $2"
fi
# Use a temporary location for gpg keys to avoid polluting the image
export GNUPGHOME="/tmp/tmp-gnupg"
mkdir -p ${GNUPGHOME}
chmod 700 ${GNUPGHOME}
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
# GPG key download sometimes fails for some reason and retrying fixes it.
local retry_count=0
local gpg_ok="false"
set +e
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
echo "(*) Downloading GPG key..."
(echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys) 2>&1 && gpg_ok="true"
if [ "${gpg_ok}" != "true" ]; then
echo "(*) Failed getting key, retring in 10s..."
((retry_count++))
sleep 10s
fi
done
set -e
if [ "${gpg_ok}" = "false" ]; then
echo "(!) Failed to get gpg key."
exit 1
fi
}
# Function to run apt-get if needed
apt_get_update_if_needed() {
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Checks if packages are installed and installs them if not
check_packages() {
if ! dpkg -s "$@" >/dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install --no-install-recommends "$@"
fi
}
# Fall back on direct download if no apt package exists
# Fetches .deb file to be installed with dpkg
install_deb_using_github() {
check_packages wget
arch=$(dpkg --print-architecture)
find_version_from_git_tags CLI_VERSION https://github.com/cli/cli
cli_filename="gh_${CLI_VERSION}_linux_${arch}.deb"
mkdir -p /tmp/ghcli
pushd /tmp/ghcli
wget https://github.com/cli/cli/releases/download/v${CLI_VERSION}/${cli_filename}
dpkg -i /tmp/ghcli/${cli_filename}
popd
rm -rf /tmp/ghcli
}
export DEBIAN_FRONTEND=noninteractive
# Install curl, ca-certificates, apt-transport-https, dirmngr, gnupg2, and git if missing
check_packages curl ca-certificates apt-transport-https dirmngr gnupg2
if ! type git >/dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install --no-install-recommends git
fi
# Soft version matching
if [ "${CLI_VERSION}" != "latest" ] && [ "${CLI_VERSION}" != "lts" ] && [ "${CLI_VERSION}" != "stable" ]; then
find_version_from_git_tags CLI_VERSION "https://github.com/cli/cli"
version_suffix="=${CLI_VERSION}"
else
version_suffix=""
fi
# Install the GitHub CLI
echo "Downloading github CLI..."
install_deb_using_github
# Method below does not work until cli/cli#6175 is fixed
# # Import key safely (new method rather than deprecated apt-key approach) and install
# . /etc/os-release
# receive_gpg_keys GITHUB_CLI_ARCHIVE_GPG_KEY /usr/share/keyrings/githubcli-archive-keyring.gpg
# echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" >/etc/apt/sources.list.d/github-cli.list
# apt-get update
# apt-get -y install "gh${version_suffix}"
# rm -rf "/tmp/gh/gnupg"
echo "Done!"


@@ -0,0 +1,7 @@
#!/bin/bash
chsh -s $(which zsh)
sh -c "$(curl -fsSL https://starship.rs/install.sh) -- --platform linux_musl" -- --yes
echo "eval \"$(starship init zsh)\"" >>~/.zshrc
curl https://github.com/Jarred-Sumner/vscode-zig/releases/download/march18/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix


@@ -0,0 +1,7 @@
#!/bin/bash
git clone https://github.com/zigtools/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git checkout 30869d7d8741656448e46fbf14f14da9ca7e5a21
git submodule update --init --recursive --progress --depth=1
zig build -Doptimize=ReleaseFast


@@ -0,0 +1,9 @@
{
"folders": [
{
// Source code
"name": "bun",
"path": "bun"
},
]
}

.devcontainer/zls.json

@@ -0,0 +1,9 @@
{
"zig_exe_path": "/build/zig/zig",
"enable_snippets": true,
"warn_style": false,
"enable_semantic_tokens": true,
"operator_completions": true,
"include_at_in_builtins": false,
"max_detail_length": 1048576
}

.gitattributes

@@ -8,26 +8,3 @@ src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation


@@ -12,7 +12,6 @@ jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}


@@ -1,50 +0,0 @@
name: bun-ecosystem-test
on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem


@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -26,7 +21,6 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -36,7 +30,7 @@ jobs:
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64


@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -35,7 +30,6 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
fail-fast: false
@@ -46,7 +40,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
@@ -54,7 +48,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
@@ -158,7 +152,7 @@ jobs:
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -187,8 +181,8 @@ jobs:
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}


@@ -1,9 +1,4 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -117,7 +110,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -126,7 +119,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
@@ -135,7 +128,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -144,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -152,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -173,9 +166,9 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
@@ -244,7 +237,6 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
@@ -257,7 +249,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -265,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -291,8 +283,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
@@ -341,7 +333,7 @@ jobs:
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -396,8 +388,8 @@ jobs:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -426,8 +418,8 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}


@@ -1,9 +1,4 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -117,7 +110,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -126,7 +119,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
@@ -135,7 +128,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -144,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -152,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -173,9 +166,9 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
@@ -245,7 +238,6 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
@@ -258,7 +250,7 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -266,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -292,8 +284,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
@@ -345,7 +337,7 @@ jobs:
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -400,8 +392,8 @@ jobs:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -430,8 +422,8 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}


@@ -1,9 +1,4 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -117,7 +110,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -126,7 +119,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
@@ -135,7 +128,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -144,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -152,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -173,8 +166,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
if: matrix.compile_obj
@@ -247,7 +240,6 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
@@ -260,7 +252,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -268,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-1/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -294,8 +286,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
env:
@@ -347,7 +339,7 @@ jobs:
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -402,8 +394,8 @@ jobs:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -432,8 +424,8 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}


@@ -8,7 +8,6 @@ jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -40,8 +39,6 @@ jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -110,7 +107,6 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
@@ -148,32 +144,3 @@ jobs:
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun


@@ -1,4 +1,4 @@
name: bun-release-types-canary
name: bun-release-canary
concurrency: release-canary
on:
push:
@@ -11,7 +11,6 @@ jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types


@@ -14,7 +14,6 @@ jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -53,8 +52,6 @@ jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -86,8 +83,6 @@ jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
@@ -130,7 +125,6 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
@@ -180,8 +174,6 @@ jobs:
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
@@ -219,39 +211,3 @@ jobs:
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun

.github/workflows/bun-typecheck.yml

@@ -0,0 +1,31 @@
name: typecheck
on:
push:
branches: [main]
paths:
- "packages/bun-types/**"
- "test/**"
pull_request:
paths:
- "packages/bun-types/**"
- "test/**"
jobs:
tests:
name: check-tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: test
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install dependencies
run: bun install
- name: Typecheck tests
run: bun run typecheck

59
.github/workflows/run-test-manually.yml vendored Normal file

@@ -0,0 +1,59 @@
name: Run Tests Manually
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
inputs:
version:
description: "Version"
required: true
default: "canary"
type: string
use_bun:
description: "Use Bun?"
required: true
default: true
type: boolean
jobs:
linux-test:
name: Tests ${{matrix.tag}} ${{github.event.inputs.version}}
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: install-npm
name: Install (npm)
run: |
npm install @oven/bun-${{matrix.tag}}@${{github.event.inputs.version}}
chmod +x node_modules/@oven/bun-${{matrix.tag}}/bin/bun
sudo cp node_modules/@oven/bun-${{matrix.tag}}/bin/bun /usr/bin/bun
- id: test
name: Test
if: ${{github.event.inputs.use_bun == 'true'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
bun run test
- id: test-node-runner
name: Test (node runner)
if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
node src/runner.node.mjs


@@ -1,7 +1,7 @@
name: zig-fmt
env:
ZIG_VERSION: 0.11.0-dev.3737+9eb008717
ZIG_VERSION: 0.11.0-dev.1783+436e99d13
on:
pull_request:
@@ -28,7 +28,7 @@ jobs:
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |

13
.gitignore vendored

@@ -13,7 +13,7 @@ dist
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
/package-lock.json
build
*.wat
zig-out
@@ -45,7 +45,6 @@ outcss
txt.js
.idea
.vscode/cpp*
.vscode/clang*
node_modules_*
*.jsb
@@ -110,16 +109,8 @@ misctools/machbench
bun-webkit
src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/test.ts
src/js/out/modules_dev
myscript.sh

4
.gitmodules vendored

@@ -65,7 +65,3 @@ fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty


@@ -1,6 +1,6 @@
src/fallback.html
src/bun.js/WebKit
src/js/out
src/bun.js/builtins/js
src/*.out.js
src/*out.*.js
src/deps
@@ -10,4 +10,3 @@ test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js


@@ -6,7 +6,7 @@ module.exports = {
quoteProps: "preserve",
overrides: [
{
files: ["*.md"],
files: "README.md",
options: {
printWidth: 80,
},


@@ -7,7 +7,3 @@ if [ -d ./node_modules/bun-webkit ]; then
# get the first matching bun-webkit-* directory name
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi
# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null


@@ -2,7 +2,9 @@
"configurations": [
{
"name": "Mac",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"forcedInclude": [
"${workspaceFolder}/src/bun.js/bindings/root.h"
],
"includePath": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
@@ -15,8 +17,8 @@
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src"
@@ -33,9 +35,8 @@
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/builtins/*",
"${workspaceFolder}/src/bun.js/builtins/cpp/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
@@ -56,7 +57,7 @@
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"compilerPath": "/opt/homebrew/opt/llvm/bin/clang++",
"cStandard": "c17",
"cppStandard": "c++20"
}


@@ -1,10 +1,3 @@
{
"recommendations": [
"ziglang.vscode-zig",
"esbenp.prettier-vscode",
"xaver.clang-format",
"vadimcn.vscode-lldb",
"bierner.comment-tagged-templates",
"ms-vscode.cpptools"
]
"recommendations": ["ziglang.vscode-zig", "esbenp.prettier-vscode", "xaver.clang-format", "vadimcn.vscode-lldb"]
}

315
.vscode/launch.json generated vendored

@@ -1,195 +1,129 @@
{
// The usage of BUN_GARBAGE_COLLECTOR_LEVEL=2 is important for debugging
// It will force the garbage collector to run after every test and every call to expect()
// it makes our tests very slow
// But it helps catch memory bugs
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits:
// { "initCommands": ["process handle -p false -s false -n false SIGHUP"] }
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "bun test [file]",
"name": "bun test",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"name": "bun test --watch",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["--watch", "test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test (all)",
"program": "bun-debug",
"args": ["wiptest"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run current file",
"program": "bun-debug",
"args": ["${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (watch)",
"program": "bun-debug",
"args": ["--watch", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (hot)",
"program": "bun-debug",
"args": ["--hot", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run",
"program": "bun-debug",
"args": ["check.tsx", "-c"],
"cwd": "${env:HOME}/Build/react-ssr",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"name": "bun http example",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/http.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"name": "bun http file example",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*]",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file]",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (gc)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (verbose)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/http-file.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --watch",
"name": "bun html-rewriter example",
"program": "bun-debug",
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/html-rewriter.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --hot",
"program": "bun-debug",
"args": ["run", "--hot", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -204,100 +138,11 @@
"request": "launch",
"name": "bun build debug",
"program": "bun-debug",
"args": ["bun", "${file}"],
"cwd": "${workspaceFolder}",
"args": ["build", "--platform=bun", "--outdir=/tmp/testout", "${file}"],
"cwd": "${file}/../../",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
},
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits.
"initCommands": ["process handle -p false -s false -n false SIGHUP"]
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug out.js",
"program": "bun-debug",
"args": ["--outfile=out.js", "bun", "${file}"],
"cwd": "${file}/../",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug STDOUT",
"program": "bun-debug",
"args": ["bun", "${file}"],
"cwd": "${file}/../",
"console": "internalConsole",
"env": {}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (no splitting, browser entry)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-no-split",
"--platform=browser",
"bun",
"./quick.tsx"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (splitting, rsc)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-split",
"--server-components",
"--platform=bun",
"--splitting",
"bun",
"/Users/jarred/Code/bun-rsc/components/Message.tsx",
"/Users/jarred/Code/bun-rsc/components/Button.tsx"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug (NO splitting, rsc)",
"program": "bun-debug",
"args": [
"--entry-names=./[name].[ext]",
"--outdir=/Users/jarred/Code/bun-rsc/.rsccheck",
"--server-components",
"--platform=bun",
"bun",
"/Users/jarred/Code/bun-rsc/pages/index.js"
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
"env": {
"NODE_ENV": "production"
// "BUN_DEBUG_QUIET_LOGS": "1"
// "BUN_DUMP_SYMBOLS": "1"
}
},
{

38
.vscode/settings.json vendored

@@ -7,7 +7,10 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildArgs": [
"obj",
"-Dfor-editor"
],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"[zig]": {
@@ -24,8 +27,6 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"zig.zls.enableInlayHints": false,
"[jsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
@@ -37,11 +38,13 @@
"[yaml]": {
"editor.formatOnSave": true
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"[markdown]": {
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "yzhang.markdown-all-in-one",
"editor.formatOnSave": true,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
@@ -75,9 +78,7 @@
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true
"test/snippets/package-json-exports/_node_modules_copy": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -204,21 +205,12 @@
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp"
"__verbose_abort": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"eslint.workingDirectories": ["packages/bun-types"],
"typescript.tsdk": "node_modules/typescript/lib"
}
"C_Cpp.errorSquiggles": "Enabled",
"eslint.workingDirectories": [
"packages/bun-types"
],
"files.insertFinalNewline": true
}


@@ -1,6 +1,6 @@
# Contributing to Bun
> **Important:** All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
## Bun's codebase
@@ -16,10 +16,6 @@ Today (February 2023), Bun's codebase has five distinct parts:
The JavaScript transpiler & module resolver is mostly independent from the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in [`src/js_parser.zig`](src/js_parser.zig). The JavaScript AST data structures are mostly in [`src/js_ast.zig`](src/js_ast.zig). The JavaScript lexer is in [`src/js_lexer.zig`](src/js_lexer.zig). A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.
## Getting started
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment setup!
## Memory management in Bun
For the Zig code, please:
@@ -59,9 +55,7 @@ The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loade
### JavaScript Builtins
TODO: update this with the new build process that uses TypeScript and `$` instead of `@`.
JavaScript builtins are located in [`src/js/builtins/*.ts`](src/js/builtins).
JavaScript builtins are located in [`src/bun.js/builtins/*.js`](src/bun.js/builtins).
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
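The `@` internal-slot syntax described above is JavaScriptCore-only: it is understood by the builtins codegen (for example the `generate-js-builtins.py` invocation visible in the Makefile diff further down), not by ordinary JavaScript engines. As a rough, hypothetical illustration of what such a builtin looks like (not a file from the repository):

```js
// Hypothetical builtin source, illustrative only; it must go through the
// builtins codegen step and will not parse as plain JavaScript.
function fillArray(length, value) {
  "use strict";
  // `@Array` reads the internal slot, so even if user code overwrites the
  // global `Array`, this builtin still constructs a real Array.
  const result = new @Array(length);
  for (let i = 0; i < length; i++) result[i] = value;
  return result;
}
```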
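The contributing guideline at the top of this file asks for test coverage with every change; with Bun installed, a minimal regression test is a small `bun:test` file run via `bun test` (the file name and assertion below are made up for illustration):

```js
// my-fix.test.js — hypothetical example of a regression test.
import { test, expect } from "bun:test";

test("regression: the fixed behavior stays fixed", () => {
  // This should fail before the fix and pass after it.
  expect([1, 2, 3].at(-1)).toBe(3);
});
```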


@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=may20-1
ARG WEBKIT_TAG=feb9
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.3737+9eb008717"
ARG ZIG_VERSION="0.11.0-dev.1783+436e99d13"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -20,7 +20,7 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.6
ARG BUN_BASE_VERSION=0.5
FROM bitnami/minideb:bullseye as bun-base
@@ -468,13 +468,13 @@ WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
# Required for webcrypto bindings
# Required for `make webcrypto`
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
make release-bindings -j10 && mv src/bun.js/bindings-obj/* /tmp
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && make webcrypto && \
make release-bindings -j10 && mv ${BUN_DEPS_OUT_DIR}/libwebcrypto.a /tmp && mv src/bun.js/bindings-obj/* /tmp
FROM bun-base as sqlite
@@ -503,36 +503,10 @@ ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
FROM scratch as build_release_cpp
COPY --from=compile_cpp /tmp/*.o /
COPY --from=compile_cpp /tmp/libwebcrypto.a /
FROM prepare_release as build_release
@@ -562,7 +536,6 @@ COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/

126
Dockerfile.devcontainer Normal file

@@ -0,0 +1,126 @@
ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG BUILDARCH=amd64
ARG ARCH=x86_64
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.1783+436e99d13"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
FROM --platform=linux/${BUILDARCH} ubuntu:22.04 as bun.devcontainer
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG BUN_PACKAGES_DIR
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
ENV PATH "$ZIG_PATH:$PATH"
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
ENV LIB_ICU_PATH ${WEBKIT_OUT_DIR}/lib
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
ENV PATH "${BUN_PACKAGES_DIR}/bun-linux-x64:${BUN_PACKAGES_DIR}/bun-linux-aarch64:${BUN_PACKAGES_DIR}/debug-bun-linux-x64:${BUN_PACKAGES_DIR}/debug-bun-linux-aarch64:$PATH"
ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
ENV BUN_INSTALL /home/ubuntu/.bun
ENV XDG_CONFIG_HOME /home/ubuntu/.config
WORKDIR ${GITHUB_WORKSPACE}
RUN apt-get update && \
apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common && \
add-apt-repository ppa:longsleep/golang-backports && \
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
chmod +x llvm.sh && \
./llvm.sh 15 && \
curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
apt-get update && \
apt-get install --no-install-recommends -y \
ca-certificates \
curl \
gnupg2 \
software-properties-common \
cmake \
build-essential \
git \
libssl-dev \
ruby \
liblld-15-dev \
libclang-15-dev \
nodejs \
gcc \
g++ \
clang-15 \
clang-format-15 \
libc++-15-dev \
libc++abi-15-dev \
lld-15 \
libicu-dev \
wget \
rustc \
cargo \
unzip \
tar \
golang-go ninja-build pkg-config automake autoconf libtool curl && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-15 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-15 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-15 90 && \
npm install -g esbuild
ENV CC=clang-15
ENV CXX=clang++-15
ENV ZIG "${ZIG_PATH}/zig"
WORKDIR $GITHUB_WORKSPACE
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
WORKDIR $GITHUB_WORKSPACE
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ARG ZIG_URL
ADD $ZIG_URL .
RUN tar -xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-15 90
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/limits.conf /etc/security/limits.conf
COPY ".devcontainer/scripts/" /scripts/
COPY ".devcontainer/scripts/getting-started.sh" $GITHUB_WORKSPACE/getting-started.sh
COPY ".devcontainer/README.md" $GITHUB_WORKSPACE/README.md
ENV JSC_BASE_DIR=$WEBKIT_DIR
ENV WEBKIT_RELEASE_DIR=$WEBKIT_DIR
ENV WEBKIT_DEBUG_DIR=$WEBKIT_DIR
ENV WEBKIT_RELEASE_DIR_LTO=$WEBKIT_DIR
RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
bash /scripts/common-debian.sh && \
bash /scripts/github.sh && \
bash /scripts/nice.sh && \
bash /scripts/zig-env.sh
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json

349
Makefile

@@ -1,4 +1,4 @@
SHELL := $(shell which bash) # Use bash syntax to be consistent
SHELL := $(shell which bash) # Use bash syntax to be consistent
OS_NAME := $(shell uname -s | tr '[:upper:]' '[:lower:]')
ARCH_NAME_RAW := $(shell uname -m)
@@ -20,7 +20,6 @@ CPU_TARGET ?= native
MARCH_NATIVE = -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH =
MMD_IF_LOCAL =
DEFAULT_MIN_MACOS_VERSION=
ARCH_NAME :=
DOCKER_BUILDARCH =
@@ -40,17 +39,11 @@ NATIVE_OR_OLD_MARCH = -march=nehalem
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 0.6
CI ?= false
BUN_BASE_VERSION = 0.5
AR=
ifeq ($(CI), false)
MMD_IF_LOCAL = -MMD
endif
BUN_OR_NODE = $(shell which bun 2>/dev/null || which node 2>/dev/null)
BUN_OR_NODE = $(shell which bun || which node)
CXX_VERSION=c++2a
TRIPLET = $(OS_NAME)-$(ARCH_NAME)
@@ -59,32 +52,30 @@ PACKAGES_REALPATH = $(realpath packages)
PACKAGE_DIR = $(PACKAGES_REALPATH)/$(PACKAGE_NAME)
DEBUG_PACKAGE_DIR = $(PACKAGES_REALPATH)/debug-$(PACKAGE_NAME)
RELEASE_BUN = $(PACKAGE_DIR)/bun
DEBUG_BIN = $(DEBUG_PACKAGE_DIR)
DEBUG_BIN = $(DEBUG_PACKAGE_DIR)/
DEBUG_BUN = $(DEBUG_BIN)/bun-debug
BUILD_ID = $(shell cat ./src/build-id)
PACKAGE_JSON_VERSION = $(BUN_BASE_VERSION).$(BUILD_ID)
BUN_BUILD_TAG = bun-v$(PACKAGE_JSON_VERSION)
BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
PRETTIER ?= $(shell which prettier 2>/dev/null || echo "./node_modules/.bin/prettier")
ESBUILD = $(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")
DSYMUTIL ?= $(shell which dsymutil 2>/dev/null || which dsymutil-15 2>/dev/null)
PRETTIER ?= $(shell which prettier || echo "./node_modules/.bin/prettier")
DSYMUTIL ?= $(shell which dsymutil || which dsymutil-15)
WEBKIT_DIR ?= $(realpath src/bun.js/WebKit)
WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release
WEBKIT_DEBUG_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Debug
WEBKIT_RELEASE_DIR_LTO ?= $(WEBKIT_DIR)/WebKitBuild/ReleaseLTO
NPM_CLIENT ?= $(shell which bun 2>/dev/null || which npm 2>/dev/null)
ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make sure zig is in PATH. Or set ZIG=/path/to-zig-executable")
NPM_CLIENT ?= $(shell which bun || which npm)
ZIG ?= $(shell which zig || echo -e "error: Missing zig. Please make sure zig is in PATH. Or set ZIG=/path/to-zig-executable")
# We must use the same compiler version for the JavaScriptCore bindings and JavaScriptCore
# If we don't do this, strange memory allocation failures occur.
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-15 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-15 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-15 2>/dev/null || which clang-format 2>/dev/null)
REAL_CC = $(shell which clang-15 || which clang)
REAL_CXX = $(shell which clang++-15 || which clang++)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -94,7 +85,7 @@ CCACHE_PATH := $(shell which ccache 2>/dev/null)
CCACHE_CC_FLAG = CC=$(CCACHE_CC_OR_CC)
ifeq (,$(findstring,$(shell which ccache 2>/dev/null),ccache))
ifeq (,$(findstring,$(shell which ccache),ccache))
CMAKE_CXX_COMPILER_LAUNCHER_FLAG := -DCMAKE_CXX_COMPILER_LAUNCHER=$(CCACHE_PATH) -DCMAKE_C_COMPILER_LAUNCHER=$(CCACHE_PATH)
CCACHE_CC_OR_CC := "$(CCACHE_PATH) $(REAL_CC)"
export CCACHE_COMPILERTYPE = clang
@@ -126,7 +117,7 @@ ifeq ($(OS_NAME),darwin)
endif
# macOS sed is different
SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null)
SED = $(shell which gsed || which sed)
BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
BUN_DEPS_DIR ?= $(shell pwd)/src/deps
@@ -155,7 +146,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-15-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
-DCMAKE_RANLIB=$(which llvm-15-ranlib || which llvm-ranlib)
@@ -177,11 +168,11 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
AR = $(shell which llvm-ar-15 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
AR = $(shell which llvm-ar-15 || which llvm-ar || which ar)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE)
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_TMP_DIR := /tmp/make-bun
@@ -193,13 +184,14 @@ DEFAULT_USE_BMALLOC := 1
USE_BMALLOC ?= DEFAULT_USE_BMALLOC
# Set via postinstall
ifeq (,$(realpath $(JSC_BASE_DIR)))
JSC_BASE_DIR = $(realpath $(firstword $(wildcard bun-webkit)))
ifeq (,$(JSC_BASE_DIR))
JSC_BASE_DIR = $(HOME)/webkit-build
endif
AUTO_JSX_BASE_DIR ?= $(realpath $(firstword $(wildcard bun-webkit)))
ifeq (,$(AUTO_JSX_BASE_DIR))
AUTO_JSX_BASE_DIR ?= $(HOME)/webkit-build
endif
JSC_BASE_DIR ?= $(AUTO_JSX_BASE_DIR)
DEFAULT_JSC_LIB :=
DEFAULT_JSC_LIB_DEBUG :=
@@ -274,7 +266,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-15 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
STRIP=$(shell which llvm-strip || which llvm-strip-15 || which strip || echo "Missing strip")
endif
@@ -293,32 +285,26 @@ SRC_WEBCORE_FILES := $(wildcard $(SRC_DIR)/webcore/*.cpp)
SRC_SQLITE_FILES := $(wildcard $(SRC_DIR)/sqlite/*.cpp)
SRC_NODE_OS_FILES := $(wildcard $(SRC_DIR)/node_os/*.cpp)
SRC_IO_FILES := $(wildcard src/io/*.cpp)
SRC_BUILTINS_FILES := $(wildcard src/js/out/*.cpp)
SRC_WEBCRYPTO_FILES := $(wildcard $(SRC_DIR)/webcrypto/*.cpp)
SRC_BUILTINS_FILES := $(wildcard src/bun.js/builtins/*.cpp)
OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(OBJ_DIR)/%.o,$(SRC_FILES))
WEBCORE_OBJ_FILES := $(patsubst $(SRC_DIR)/webcore/%.cpp,$(OBJ_DIR)/%.o,$(SRC_WEBCORE_FILES))
SQLITE_OBJ_FILES := $(patsubst $(SRC_DIR)/sqlite/%.cpp,$(OBJ_DIR)/%.o,$(SRC_SQLITE_FILES))
NODE_OS_OBJ_FILES := $(patsubst $(SRC_DIR)/node_os/%.cpp,$(OBJ_DIR)/%.o,$(SRC_NODE_OS_FILES))
BUILTINS_OBJ_FILES := $(patsubst src/js/out/%.cpp,$(OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
BUILTINS_OBJ_FILES := $(patsubst src/bun.js/builtins/%.cpp,$(OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
IO_FILES := $(patsubst src/io/%.cpp,$(OBJ_DIR)/%.o,$(SRC_IO_FILES))
MODULES_OBJ_FILES := $(patsubst $(MODULES_DIR)/%.cpp,$(OBJ_DIR)/%.o,$(MODULES_FILES))
WEBCRYPTO_OBJ_FILES := $(patsubst $(SRC_DIR)/webcrypto/%.cpp,$(OBJ_DIR)/%.o,$(SRC_WEBCRYPTO_FILES))
DEBUG_OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_FILES))
DEBUG_WEBCORE_OBJ_FILES := $(patsubst $(SRC_DIR)/webcore/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_WEBCORE_FILES))
DEBUG_SQLITE_OBJ_FILES := $(patsubst $(SRC_DIR)/sqlite/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_SQLITE_FILES))
DEBUG_NODE_OS_OBJ_FILES := $(patsubst $(SRC_DIR)/node_os/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_NODE_OS_FILES))
DEBUG_BUILTINS_OBJ_FILES := $(patsubst src/js/out/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
DEBUG_BUILTINS_OBJ_FILES := $(patsubst src/bun.js/builtins/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
DEBUG_IO_FILES := $(patsubst src/io/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_IO_FILES))
DEBUG_MODULES_OBJ_FILES := $(patsubst $(MODULES_DIR)/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(MODULES_FILES))
DEBUG_WEBCRYPTO_OBJ_FILES := $(patsubst $(SRC_DIR)/webcrypto/%.cpp, $(DEBUG_OBJ_DIR)/%.o, $(SRC_WEBCRYPTO_FILES))
BINDINGS_OBJ := $(OBJ_FILES) $(WEBCORE_OBJ_FILES) $(SQLITE_OBJ_FILES) $(NODE_OS_OBJ_FILES) $(BUILTINS_OBJ_FILES) $(IO_FILES) $(MODULES_OBJ_FILES) $(WEBCRYPTO_OBJ_FILES)
DEBUG_BINDINGS_OBJ := $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG_SQLITE_OBJ_FILES) $(DEBUG_NODE_OS_OBJ_FILES) $(DEBUG_BUILTINS_OBJ_FILES) $(DEBUG_IO_FILES) $(DEBUG_MODULES_OBJ_FILES) $(DEBUG_WEBCRYPTO_OBJ_FILES)
-include $(BINDINGS_OBJ:.o=.d)
-include $(DEBUG_BINDINGS_OBJ:.o=.d)
BINDINGS_OBJ := $(OBJ_FILES) $(WEBCORE_OBJ_FILES) $(SQLITE_OBJ_FILES) $(NODE_OS_OBJ_FILES) $(BUILTINS_OBJ_FILES) $(IO_FILES) $(MODULES_OBJ_FILES)
DEBUG_BINDINGS_OBJ := $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG_SQLITE_OBJ_FILES) $(DEBUG_NODE_OS_OBJ_FILES) $(DEBUG_BUILTINS_OBJ_FILES) $(DEBUG_IO_FILES) $(DEBUG_MODULES_OBJ_FILES)
ALL_JSC_INCLUDE_DIRS := -I$(WEBKIT_RELEASE_DIR)/WTF/Headers \
-I$(WEBKIT_RELEASE_DIR)/ICU/Headers \
@@ -330,12 +316,12 @@ ALL_JSC_INCLUDE_DIRS := -I$(WEBKIT_RELEASE_DIR)/WTF/Headers \
-I$(WEBKIT_RELEASE_DIR)/WTF/PrivateHeaders
SHARED_INCLUDE_DIR = -I$(realpath src/bun.js/bindings)/ \
-I$(realpath src/js/builtins/) \
-I$(realpath src/js/out/) \
-I$(realpath src/bun.js/builtins/) \
-I$(realpath src/bun.js/bindings) \
-I$(realpath src/bun.js/bindings/webcore) \
-I$(realpath src/bun.js/bindings/webcrypto) \
-I$(realpath src/bun.js/bindings/sqlite) \
-I$(realpath src/bun.js/builtins/cpp) \
-I$(realpath src/bun.js/bindings/node_os) \
-I$(realpath src/bun.js/modules) \
-I$(JSC_INCLUDE_DIR)
@@ -352,7 +338,7 @@ LINUX_INCLUDE_DIRS := $(ALL_JSC_INCLUDE_DIRS) \
UWS_INCLUDE_DIR := -I$(BUN_DEPS_DIR)/uws/uSockets/src -I$(BUN_DEPS_DIR)/uws/src -I$(BUN_DEPS_DIR)
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
ifeq ($(OS_NAME),linux)
@@ -460,7 +446,6 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-ltcc \
-lusockets \
-lcares \
-lzstd \
$(BUN_DEPS_OUT_DIR)/libuwsockets.o
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
@@ -514,9 +499,9 @@ BUN_LLD_FLAGS_WITHOUT_JSC = $(ARCHIVE_FILES) \
BUN_LLD_FLAGS = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES) $(BINDINGS_OBJ)
BUN_LLD_FLAGS_DEBUG = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES_DEBUG) $(DEBUG_BINDINGS_OBJ)
BUN_LLD_FLAGS_FAST = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES_DEBUG) $(BINDINGS_OBJ)
BUN_LLD_FLAGS = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES) $(BINDINGS_OBJ) -lwebcrypto
BUN_LLD_FLAGS_DEBUG = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES_DEBUG) $(DEBUG_BINDINGS_OBJ) -lwebcrypto-debug
BUN_LLD_FLAGS_FAST = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES_DEBUG) $(BINDINGS_OBJ) -lwebcrypto-debug
CLANG_VERSION = $(shell $(CC) --version | awk '/version/ {for(i=1; i<=NF; i++){if($$i=="version"){split($$(i+1),v,".");print v[1]}}}')
@@ -527,16 +512,14 @@ bun:
npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
npm-install-dev:
$(NPM_CLIENT) install
cd test && $(NPM_CLIENT) install
cd packages/bun-types && $(NPM_CLIENT) install --production
print-% : ; @echo $* = $($*)
get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
# Prevent dependency on libtcc1 so it doesn't do filesystem lookups
TINYCC_CFLAGS= -DTCC_LIBTCC1=\"\0\"
@@ -551,18 +534,24 @@ tinycc:
make -j10 && \
cp $(TINYCC_DIR)/*.a $(BUN_DEPS_OUT_DIR)
PYTHON=$(shell which python 2>/dev/null || which python3 2>/dev/null || which python2 2>/dev/null)
.PHONY: builtins
builtins:
NODE_ENV=production bun src/js/builtins/codegen/index.ts --minify
.PHONY: esm
esm:
NODE_ENV=production bun src/js/build-esm.ts
esm-debug:
BUN_DEBUG_QUIET_LOGS=1 NODE_ENV=production bun-debug src/js/build-esm.ts
builtins: ## to generate builtins
rm -f src/bun.js/bindings/*Builtin*.cpp src/bun.js/bindings/*Builtin*.h src/bun.js/bindings/*Builtin*.cpp
rm -rf src/bun.js/builtins/cpp
mkdir -p src/bun.js/builtins/cpp
$(shell which python || which python2) $(realpath $(WEBKIT_DIR)/Source/JavaScriptCore/Scripts/generate-js-builtins.py) -i $(realpath src)/bun.js/builtins/js -o $(realpath src)/bun.js/builtins/cpp --framework WebCore --force
$(shell which python || which python2) $(realpath $(WEBKIT_DIR)/Source/JavaScriptCore/Scripts/generate-js-builtins.py) -i $(realpath src)/bun.js/builtins/js -o $(realpath src)/bun.js/builtins/cpp --framework WebCore --wrappers-only
rm -rf /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
echo -e '// clang-format off\nnamespace Zig { class GlobalObject; }\n#include "root.h"\n' >> /tmp/1.h
cat /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h > src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
mv src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1 src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h
rm -rf /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
echo -e '// clang-format off\nnamespace Zig { class GlobalObject; }\n#include "root.h"\n' >> /tmp/1.h
cat /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp > src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp.1
mv src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp.1 src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp
$(SED) -i -e 's/class JSDOMGlobalObject/using JSDOMGlobalObject = Zig::GlobalObject/' src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h
# this is the one we actually build
mv src/bun.js/builtins/cpp/*JSBuiltin*.cpp src/bun.js/builtins
.PHONY: generate-builtins
generate-builtins: builtins
@@ -633,9 +622,6 @@ compile-ffi-test:
sqlite:
.PHONY: zstd
zstd:
cd $(BUN_DEPS_DIR)/zstd && rm -rf build-cmake-debug && cmake $(CMAKE_FLAGS) -DZSTD_BUILD_STATIC=ON -B build-cmake-debug -S build/cmake -G Ninja && ninja -C build-cmake-debug && cp build-cmake-debug/lib/libzstd.a $(BUN_DEPS_OUT_DIR)/libzstd.a
.PHONY: libarchive
libarchive:
@@ -673,27 +659,21 @@ require:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@15"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@esbuild --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@$(NPM_CLIENT) --version >/dev/null 2>&1 || (echo -e "ERROR: NPM client (bun or npm) is required."; exit 1)
@go version >/dev/null 2>&1 || (echo -e "ERROR: go is required."; exit 1)
@which aclocal > /dev/null || (echo -e "ERROR: automake is required. Install with:\n\n $(POSIX_PKG_MANAGER) install automake"; exit 1)
@which $(LIBTOOL) > /dev/null || (echo -e "ERROR: libtool is required. Install with:\n\n $(POSIX_PKG_MANAGER) install libtool"; exit 1)
@which ninja > /dev/null || (echo -e "ERROR: Ninja is required. Install with:\n\n $(POSIX_PKG_MANAGER) install $(PKGNAME_NINJA)"; exit 1)
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
@echo "You have the dependencies installed! Woo"
init-submodules:
git submodule update --init --recursive --progress --depth=1 --checkout
git submodule update --init --recursive --progress --depth=1
.PHONY: build-obj
build-obj:
$(ZIG) build obj -Doptimize=ReleaseFast -Dcpu="$(CPU_TARGET)"
.PHONY: build-obj-small
build-obj-small:
$(ZIG) build obj -Doptimize=ReleaseSmall -Dcpu="$(CPU_TARGET)"
.PHONY: dev-build-obj-wasm
dev-build-obj-wasm:
$(ZIG) build bun-wasm -Dtarget=wasm32-freestanding
@@ -734,10 +714,10 @@ wasm: api build-obj-wasm-small
@cp src/api/schema.d.ts packages/bun-wasm/schema.d.ts
@cp src/api/schema.js packages/bun-wasm/schema.js
@cd packages/bun-wasm && $(NPM_CLIENT) run tsc -- -p .
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=esm --minify 2> /dev/null
@esbuild --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=esm --minify 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.mjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.mjs.map
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@esbuild --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.cjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.cjs.map
@rm -rf packages/bun-wasm/*.tsbuildinfo
@@ -772,10 +752,10 @@ sign-macos-aarch64:
gon sign.macos-aarch64.json
cls:
@echo -e "\n\n---\n\n"
@echo "\n\n---\n\n"
jsc-check:
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_INCLUDE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit include directory at $(JSC_INCLUDE_DIR)." && exit 1)
@ls $(JSC_LIB) >/dev/null 2>&1 || (echo "Failed to access WebKit lib directory at $(JSC_LIB)." && exit 1)
@@ -798,7 +778,7 @@ release-safe: prerelease release-safe-only
.PHONY: fmt-cpp
fmt-cpp:
cd src/bun.js/bindings && $(CLANG_FORMAT) *.cpp *.h -i
cd src/bun.js/bindings && clang-format *.cpp *.h -i
.PHONY: fmt-zig
fmt-zig:
@@ -820,21 +800,21 @@ node-fallbacks:
.PHONY: fallback_decoder
fallback_decoder:
@$(ESBUILD) --target=esnext --bundle src/fallback.ts --format=iife --platform=browser --minify > src/fallback.out.js
@esbuild --target=esnext --bundle src/fallback.ts --format=iife --platform=browser --minify > src/fallback.out.js
.PHONY: runtime_js
runtime_js:
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
@NODE_ENV=production esbuild --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
@NODE_ENV=production esbuild --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
@NODE_ENV=production esbuild --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
@NODE_ENV=production esbuild --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
.PHONY: runtime_js_dev
runtime_js_dev:
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
@NODE_ENV=development esbuild --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
@NODE_ENV=development esbuild --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
@NODE_ENV=development esbuild --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
@NODE_ENV=development esbuild --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
.PHONY: bun_error
bun_error:
@@ -843,7 +823,7 @@ bun_error:
.PHONY: generate-install-script
generate-install-script:
@rm -f $(PACKAGES_REALPATH)/bun/install.js
@$(ESBUILD) --log-level=error --define:BUN_VERSION="\"$(PACKAGE_JSON_VERSION)\"" --define:process.env.NODE_ENV="\"production\"" --platform=node --format=cjs $(PACKAGES_REALPATH)/bun/install.ts > $(PACKAGES_REALPATH)/bun/install.js
@esbuild --log-level=error --define:BUN_VERSION="\"$(PACKAGE_JSON_VERSION)\"" --define:process.env.NODE_ENV="\"production\"" --platform=node --format=cjs $(PACKAGES_REALPATH)/bun/install.ts > $(PACKAGES_REALPATH)/bun/install.js
.PHONY: fetch
fetch: $(IO_FILES)
@@ -913,17 +893,38 @@ bun-codesign-release-local:
bun-codesign-release-local-debug:
.PHONY: jsc
jsc: jsc-build jsc-copy-headers jsc-bindings
.PHONY: jsc-build
jsc-build: $(JSC_BUILD_STEPS)
.PHONY: jsc-bindings
jsc-bindings: headers bindings
jsc-bindings: headers bindings webcrypto-debug webcrypto
.PHONY: clone-submodules
clone-submodules:
git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress
.PHONY: devcontainer
devcontainer: $(OBJ_DIR) $(DEBUG_OBJ_DIR) clone-submodules mimalloc zlib libarchive boringssl picohttp identifier-cache node-fallbacks npm-install api analytics bun_error fallback_decoder bindings uws lolhtml usockets tinycc c-ares runtime_js_dev sqlite webcrypto-debug webcrypto
.PHONY: devcontainer-build
devcontainer-build:
DOCKER_BUILDARCH="$(DOCKER_BUILDARCH)" devcontainer build --workspace-folder .
.PHONY: devcontainer-up
devcontainer-up:
DOCKER_BUILDARCH="$(DOCKER_BUILDARCH)" devcontainer up --workspace-folder .
.PHONY: devcontainer-rebuild
devcontainer-rebuild:
DOCKER_BUILDARCH="$(DOCKER_BUILDARCH)" devcontainer up --workspace-folder . --remove-existing-container
.PHONY: devcontainer-sh
devcontainer-sh:
DOCKER_BUILDARCH="$(DOCKER_BUILDARCH)" devcontainer exec --workspace-folder . zsh
CLANG_FORMAT := $(shell command -v clang-format 2> /dev/null)
.PHONY: headers
headers:
@@ -1088,10 +1089,10 @@ dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
.PHONY: dev
dev: mkdir-dev esm dev-obj link
dev: mkdir-dev dev-obj bun-link-lld-debug
mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)
mkdir -p $(DEBUG_PACKAGE_DIR)/bin
test-all:
$(RELEASE_BUN) test
@@ -1114,10 +1115,11 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSModuleNamespaceObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSModuleNamespaceObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JIT.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JIT.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StructureStubInfo.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StructureStubInfo.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PolymorphicAccess.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PolymorphicAccess.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/AccessCase.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AccessCase.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ObjectPropertyConditionSet.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ObjectPropertyConditionSet.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PolyProtoAccessChain.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PolyProtoAccessChain.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/InlineCacheCompiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/InlineCacheCompiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PutKind.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PutKind.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StructureStubClearingWatchpoint.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/AdaptiveInferredPropertyValueWatchpointBase.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AdaptiveInferredPropertyValueWatchpointBase.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
@@ -1151,9 +1153,6 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AsyncFunctionPrototype.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AsyncFunctionPrototype.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
# This is a workaround for a JSC bug that impacts aarch64
@@ -1276,7 +1275,7 @@ jsc-build-mac-copy:
clean-jsc:
cd src/bun.js/WebKit && rm -rf **/CMakeCache.txt **/CMakeFiles && rm -rf src/bun.js/WebKit/WebKitBuild
clean-bindings:
rm -rf $(OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/webcore/*.o $(DEBUG_BINDINGS_OBJ) $(OBJ_DIR)/webcore/*.o $(BINDINGS_OBJ) $(OBJ_DIR)/*.d $(DEBUG_OBJ_DIR)/*.d
rm -rf $(OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/webcore/*.o $(DEBUG_BINDINGS_OBJ) $(OBJ_DIR)/webcore/*.o $(BINDINGS_OBJ)
.PHONY: clean
clean: clean-bindings
@@ -1289,12 +1288,12 @@ clean: clean-bindings
(cd $(BUN_DEPS_DIR)/c-ares && rm -rf build && make clean) || echo "";
.PHONY: release-bindings
release-bindings: $(OBJ_DIR) $(OBJ_FILES) $(WEBCORE_OBJ_FILES) $(SQLITE_OBJ_FILES) $(NODE_OS_OBJ_FILES) $(BUILTINS_OBJ_FILES) $(IO_FILES) $(MODULES_OBJ_FILES) $(WEBCRYPTO_OBJ_FILES)
release-bindings: $(OBJ_DIR) $(OBJ_FILES) $(WEBCORE_OBJ_FILES) $(SQLITE_OBJ_FILES) $(NODE_OS_OBJ_FILES) $(BUILTINS_OBJ_FILES) $(IO_FILES) $(MODULES_OBJ_FILES)
# Do not add $(DEBUG_DIR) to this list
# It will break caching, causing you to have to wait for every .cpp file to rebuild.
.PHONY: bindings
bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG_SQLITE_OBJ_FILES) $(DEBUG_NODE_OS_OBJ_FILES) $(DEBUG_BUILTINS_OBJ_FILES) $(DEBUG_IO_FILES) $(DEBUG_MODULES_OBJ_FILES) $(DEBUG_WEBCRYPTO_OBJ_FILES)
bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG_SQLITE_OBJ_FILES) $(DEBUG_NODE_OS_OBJ_FILES) $(DEBUG_BUILTINS_OBJ_FILES) $(DEBUG_IO_FILES) $(DEBUG_MODULES_OBJ_FILES)
.PHONY: jsc-bindings-mac
jsc-bindings-mac: bindings
@@ -1303,7 +1302,7 @@ jsc-bindings-mac: bindings
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_TRACK_VALGRIND=ON
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
@@ -1325,9 +1324,8 @@ mimalloc-debug:
-DMI_OVERRIDE=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-DCMAKE_CXX_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja
&& make -j $(CPUS);
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
@@ -1349,9 +1347,8 @@ mimalloc:
-DMI_OVERRIDE=OFF \
-DMI_OSX_ZONE=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja;
.\
&& make -j $(CPUS);
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
@@ -1359,21 +1356,15 @@ mimalloc-wasm:
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
# alias for link, in case anyone still types that
.PHONY: bun-link-lld-debug
bun-link-lld-debug: link
# link a debug build of bun
.PHONY: link
link:
bun-link-lld-debug:
$(CXX) $(BUN_LLD_FLAGS_DEBUG) $(DEBUG_FLAGS) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
-W \
-o $(DEBUG_BIN)/bun-debug
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
link-no-jsc:
bun-link-lld-debug-no-jsc:
$(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
@@ -1392,19 +1383,6 @@ bun-relink-copy:
mkdir -p $(PACKAGE_DIR)
cp $(BUN_DEPLOY_DIR).o $(BUN_RELEASE_BIN).o
.PHONY: bun-link-lld-profile profile
bun-link-lld-profile:
$(CXX) $(BUN_LLD_FLAGS) $(SYMBOLS) -g -gdwarf-4 -fno-omit-frame-pointer \
$(BUN_RELEASE_BIN).o \
-o $(BUN_RELEASE_BIN) \
-W \
$(OPTIMIZATION_LEVEL) $(RELEASE_FLAGS)
rm -rf $(BUN_RELEASE_BIN).dSYM
cp $(BUN_RELEASE_BIN) $(BUN_RELEASE_BIN)-profile
@rm -f $(BUN_RELEASE_BIN).o.o # workaround for https://github.com/ziglang/zig/issues/14080
build-profile: build-obj bun-link-lld-profile bun-codesign-release-local
bun-link-lld-release:
$(CXX) $(BUN_LLD_FLAGS) $(SYMBOLS) \
$(BUN_RELEASE_BIN).o \
@@ -1455,11 +1433,11 @@ wasm-return1:
generate-classes:
bun src/bun.js/scripts/generate-classes.ts
$(ZIG) fmt src/bun.js/bindings/generated_classes.zig
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
clang-format -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
generate-sink:
bun src/bun.js/scripts/generate-jssink.js
$(CLANG_FORMAT) -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
clang-format -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
$(WEBKIT_DIR)/Source/JavaScriptCore/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/namespace JSC {//' src/bun.js/bindings/JSSinkLookupTable.h
@@ -1483,7 +1461,6 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1494,7 +1471,6 @@ $(OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1505,7 +1481,6 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1516,7 +1491,6 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1527,7 +1501,6 @@ $(OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1538,30 +1511,16 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
$(OBJ_DIR)/%.o: src/js/out/%.cpp
$(OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
$(OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1575,7 +1534,6 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1590,7 +1548,6 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1603,7 +1560,6 @@ $(DEBUG_OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1619,7 +1575,6 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1634,7 +1589,6 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1644,12 +1598,11 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: src/js/out/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/js/out/%.cpp
.PHONY: src/bun.js/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1662,7 +1615,6 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1671,12 +1623,11 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
-g3 -c -o $@ $<
.PHONY: src/bun.js/bindings/webcrypto/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(DEBUG_OBJ_DIR)/webcrypto/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-I$(SRC_DIR) \
-fno-rtti \
@@ -1685,6 +1636,43 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
-DBUN_DEBUG \
-g3 -c -o $@ $<
.PHONY: webcrypto-debug-obj
# Make all the .cpp files in the webcrypto directory into .o files using Makefile substitutions
webcrypto-debug-obj: $(patsubst src/bun.js/bindings/webcrypto/%.cpp, $(DEBUG_OBJ_DIR)/webcrypto/%.o, $(wildcard src/bun.js/bindings/webcrypto/*.cpp))
.PHONY: webcrypto-debug
webcrypto-debug:
rm -rf $(DEBUG_OBJ_DIR)/webcrypto $(BUN_DEPS_OUT_DIR)/libwebcrypto-debug.a
mkdir -p $(DEBUG_OBJ_DIR)/webcrypto
make webcrypto-debug-obj -j$(CPUS)
$(AR) rcs $(BUN_DEPS_OUT_DIR)/libwebcrypto-debug.a $(DEBUG_OBJ_DIR)/webcrypto/*.o
$(OBJ_DIR)/webcrypto/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_RELEASE) \
-g3 -c -o $@ $<
.PHONY: webcrypto-obj
# Make all the .cpp files in the webcrypto directory into .o files using Makefile substitutions
webcrypto-obj: $(patsubst src/bun.js/bindings/webcrypto/%.cpp, $(OBJ_DIR)/webcrypto/%.o, $(wildcard src/bun.js/bindings/webcrypto/*.cpp))
.PHONY: webcrypto
webcrypto:
rm -rf $(OBJ_DIR)/webcrypto $(BUN_DEPS_OUT_DIR)/libwebcrypto.a
mkdir -p $(OBJ_DIR)/webcrypto
make webcrypto-obj -j$(CPUS)
$(AR) rcs $(BUN_DEPS_OUT_DIR)/libwebcrypto.a $(OBJ_DIR)/webcrypto/*.o
sizegen:
mkdir -p $(BUN_TMP_DIR)
$(CXX) src/bun.js/headergen/sizegen.cpp -Wl,-dead_strip -Wl,-dead_strip_dylibs -fuse-ld=lld -o $(BUN_TMP_DIR)/sizegen $(CLANG_FLAGS) -O1
@@ -1851,47 +1839,12 @@ copy-to-bun-release-dir-bin:
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
.PHONY: cold-jsc-start
cold-jsc-start:
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(LIBICONV_PATH) \
$(DEFAULT_LINKER_FLAGS) \
$(PLATFORM_LINKER_FLAGS) \
$(ICU_FLAGS) \
$(JSC_FILES) \
misctools/cold-jsc-start.cpp -o cold-jsc-start
.PHONY: vendor-without-npm
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd
.PHONY: vendor-without-check
vendor-without-check: npm-install vendor-without-npm
vendor-without-check: npm-install node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares
.PHONY: vendor
vendor: require init-submodules vendor-without-check
.PHONY: vendor-dev
vendor-dev: require init-submodules npm-install-dev vendor-without-npm
.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
.PHONY: regenerate-bindings
regenerate-bindings:
@make clean-bindings builtins
@make bindings -j$(CPU_COUNT)
.PHONY: setup
setup: vendor-dev identifier-cache clean-bindings
make jsc-check
make bindings -j$(CPU_COUNT)
@echo ""
@echo "Development environment setup complete"
@echo "Run \`make dev\` to build \`bun-debug\`"
@echo ""

View File

@@ -40,7 +40,7 @@ bun run index.tsx # TS and JSX supported out of the box
bun test # run tests
bun run start # run the `start` script in `package.json`
bun install <pkg> # install a package
bunx cowsay 'Hello, world!' # execute a package
bunx cowsay "Hello, world!" # execute a package
```
## Install
@@ -48,7 +48,7 @@ bunx cowsay 'Hello, world!' # execute a package
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
>
>
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
```sh
@@ -67,6 +67,7 @@ docker pull oven/bun
docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```
### Upgrade
To upgrade to the latest version of Bun, run:
@@ -99,7 +100,7 @@ bun upgrade --canary
- [Runtime](https://bun.sh/docs/runtime/index)
- [Module resolution](https://bun.sh/docs/runtime/modules)
- [Hot &amp; live reloading](https://bun.sh/docs/runtime/hot)
- [Plugins](https://bun.sh/docs/bundler/plugins)
- [Plugins](https://bun.sh/docs/runtime/plugins)
- Ecosystem
- [Node.js](https://bun.sh/docs/ecosystem/nodejs)
- [TypeScript](https://bun.sh/docs/ecosystem/typescript)
@@ -126,9 +127,10 @@ bun upgrade --canary
- [DNS](https://bun.sh/docs/api/dns)
- [Node-API](https://bun.sh/docs/api/node-api)
## Contributing
Refer to the [Project > Development](https://bun.sh/docs/project/development) guide to start contributing to Bun.
Refer to the [Project > Contributing](https://bun.sh/docs/project/developing) guide to start contributing to Bun.
## License

Binary file not shown.

View File

@@ -1,171 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
esbuild

View File

@@ -1,40 +0,0 @@
# Bundler benchmark
This is a performance benchmark of the following bundlers:
- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack
It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), aside from the fact that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/), with minification and source maps enabled.
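For a sense of what each timed run boils down to, here is a rough sketch of a single bundler invocation; the entry point and output path are illustrative, and the exact commands live in the benchmark's Makefile:
```sh
# Bundle the combined three.js entry point with minification and source maps,
# mirroring the settings the benchmark applies to every bundler.
esbuild --bundle entry.js --outfile=out/three.esbuild.js --minify --sourcemap
```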
To run the benchmark:
```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```
Various output will be written to the console by each bundler. Scan through the results for lines that look like this underneath each bundler output:
```sh
real <number>
user <number>
sys <number>
```
These lines are generated by the `time` command which is used to benchmark each build.
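To reproduce one of these measurements by hand, prefix any command with `time`; for example, timing the whole wrapper script (rather than a single bundler) looks like this:
```sh
# Prints the real/user/sys summary once the script exits.
time ./run-bench.sh
```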
## Results
The `real` results, as run on a 16-inch M1 MacBook Pro:
| Bundler | Time |
| ------- | ------ |
| Bun | 0.17s |
| esbuild | 0.33s |
| Rollup | 18.82s |
| Webpack | 26.21s |
| Parcel | 17.95s |

Binary file not shown.

View File

@@ -1 +0,0 @@
console.log("Hello via Bun!");

View File

@@ -1,8 +0,0 @@
{
"name": "bundle",
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.5.0"
}
}

View File

@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "bundler",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -1,31 +0,0 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";
export const implementations = [
{
EventEmitter: EventEmitterNative,
name: process.isBun ? (EventEmitterNative.init ? "bun" : "C++") : "node:events",
monkey: true,
},
// { EventEmitter: EventEmitter3, name: "EventEmitter3" },
].filter(Boolean);
for (const impl of implementations) {
impl.EventEmitter?.setMaxListeners?.(Infinity);
}
export function groupForEmitter(name, cb) {
if (implementations.length === 1) {
return cb({
...implementations[0],
name: `${name}: ${implementations[0].name}`,
});
} else {
return group(name, () => {
for (let impl of implementations) {
cb(impl);
}
});
}
}

View File

@@ -1,96 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("single emit", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("on x 10_000 (handler)", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
if (!called) throw new Error("not called");
});
});
// for (let { impl: EventEmitter, name, monkey } of []) {
// if (monkey) {
// var monkeyEmitter = Object.assign({}, EventEmitter.prototype);
// monkeyEmitter.on("hello", event => {
// event.preventDefault();
// });
// bench(`[monkey] ${className}.emit`, () => {
// var called = false;
// monkeyEmitter.emit("hello", {
// preventDefault() {
// id++;
// called = true;
// },
// });
// if (!called) {
// throw new Error("monkey failed");
// }
// });
// bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
// var cb = () => {
// event.preventDefault();
// };
// monkeyEmitter.on("hey", cb);
// for (let i = 0; i < 10_000; i++)
// monkey.emit("hey", {
// preventDefault() {
// id++;
// },
// });
// monkeyEmitter.off("hey", cb);
// });
// }
// }
// var target = new EventTarget();
// target.addEventListener("hello", event => {});
// bench("EventTarget.dispatch", () => {
// target.dispatchEvent(event);
// });
// var hey = new Event("hey");
// bench("EventTarget.on x 10_000 (handler)", () => {
// var handler = event => {};
// target.addEventListener("hey", handler);
// for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
// target.removeEventListener("hey", handler);
// });
await run();

View File

@@ -1,40 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("test 1", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("test 2", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
await run();

View File

@@ -1,63 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo-RNG is derived from https://stackoverflow.com/a/424445
let rngState = 123456789;
function nextInt() {
const m = 0x80000000; // 2**31;
const a = 1103515245;
const c = 12345;
rngState = (a * rngState + c) % m;
return rngState;
}
function nextRange(start, end) {
// returns in range [start, end): including start, excluding end
// can't just take nextInt modulo the range because of weak randomness in the lower bits
const rangeSize = end - start;
const randomUnder1 = nextInt() / 0x7fffffff; // 2**31 - 1
return start + Math.floor(randomUnder1 * rangeSize);
}
const chunks = new Array(1024).fill(null).map((_, j) => {
const arr = new Uint8Array(1024);
for (let i = 0; i < arr.length; i++) {
arr[i] = nextRange(0, 256);
}
return arr;
});
groupForEmitter("stream simulation", ({ EventEmitter, name }) => {
bench(name, () => {
let id = 0;
const stream = new EventEmitter();
stream.on("start", res => {
if (res.status !== 200) throw new Error("not 200");
});
const received = [];
stream.on("data", req => {
received.push(req);
});
stream.on("end", ev => {
ev.preventDefault();
});
// simulate a stream
stream.emit("start", { status: 200 });
for (let chunk of chunks) {
stream.emit("data", chunk);
}
stream.emit("end", {
preventDefault() {
id++;
},
});
if (id !== 1) throw new Error("not implemented right");
if (received.length !== 1024) throw new Error("not implemented right");
});
});
await run();

View File

@@ -128,27 +128,27 @@ pub fn main() anyerror!void {
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -115,27 +115,27 @@ pub fn main() anyerror!void {
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -1,4 +0,0 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
extends: ["@remix-run/eslint-config", "@remix-run/eslint-config/node"],
};

View File

@@ -1,10 +0,0 @@
node_modules
/.cache
/build
/public/build
.env
package-lock.json
yarn.lock
pnpm-lock.yaml
bun.lockb

View File

@@ -1,18 +0,0 @@
# `install` benchmark
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine)
```
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```
To check that the app is working as expected:
```
$ bun run dev
$ npm run dev
$ yarn dev
$ pnpm dev
```
Then visit [http://localhost:3000](http://localhost:3000).

View File

@@ -1,18 +0,0 @@
/**
* By default, Remix will handle hydrating your app on the client for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.client
*/
import { RemixBrowser } from "@remix-run/react";
import { startTransition, StrictMode } from "react";
import { hydrateRoot } from "react-dom/client";
startTransition(() => {
hydrateRoot(
document,
<StrictMode>
<RemixBrowser />
</StrictMode>,
);
});

View File

@@ -1,101 +0,0 @@
/**
* By default, Remix will handle generating the HTTP Response for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
*/
import { PassThrough } from "node:stream";
import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { renderToPipeableStream } from "react-dom/server";
const ABORT_DELAY = 5_000;
export default function handleRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return isbot(request.headers.get("user-agent"))
? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
: handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
}
function handleBotRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onAllReady() {
const body = new PassThrough();
responseHeaders.set("Content-Type", "text/html");
resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);
pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
responseStatusCode = 500;
console.error(error);
},
},
);
setTimeout(abort, ABORT_DELAY);
});
}
function handleBrowserRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onShellReady() {
const body = new PassThrough();
responseHeaders.set("Content-Type", "text/html");
resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);
pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
console.error(error);
responseStatusCode = 500;
},
},
);
setTimeout(abort, ABORT_DELAY);
});
}

View File

@@ -1,20 +0,0 @@
import { Links, LiveReload, Meta, Outlet, Scripts, ScrollRestoration } from "@remix-run/react";
export default function App() {
return (
<html lang="en">
<head>
<meta charSet="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<Meta />
<Links />
</head>
<body>
<Outlet />
<ScrollRestoration />
<Scripts />
<LiveReload />
</body>
</html>
);
}

View File

@@ -1,30 +0,0 @@
import type { V2_MetaFunction } from "@remix-run/node";
export const meta: V2_MetaFunction = () => {
return [{ title: "New Remix App" }];
};
export default function Index() {
return (
<div style={{ fontFamily: "system-ui, sans-serif", lineHeight: "1.4" }}>
<h1>Welcome to Remix</h1>
<ul>
<li>
<a target="_blank" href="https://remix.run/tutorials/blog" rel="noreferrer">
15m Quickstart Blog Tutorial
</a>
</li>
<li>
<a target="_blank" href="https://remix.run/tutorials/jokes" rel="noreferrer">
Deep Dive Jokes App Tutorial
</a>
</li>
<li>
<a target="_blank" href="https://remix.run/docs" rel="noreferrer">
Remix Docs
</a>
</li>
</ul>
</div>
);
}

View File

@@ -1,31 +0,0 @@
{
"private": true,
"sideEffects": false,
"scripts": {
"build": "remix build",
"dev": "remix dev",
"start": "remix-serve build",
"typecheck": "tsc",
"clean": "rm -rf node_modules",
"bench": "hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'"
},
"dependencies": {
"@remix-run/node": "^1.15.0",
"@remix-run/react": "^1.15.0",
"@remix-run/serve": "^1.15.0",
"isbot": "^3.6.5",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@remix-run/dev": "^1.15.0",
"@remix-run/eslint-config": "^1.15.0",
"@types/react": "^18.0.25",
"@types/react-dom": "^18.0.8",
"eslint": "^8.27.0",
"typescript": "^4.8.4"
},
"engines": {
"node": ">=14"
}
}

Binary file not shown.


View File

@@ -1,14 +0,0 @@
/** @type {import('@remix-run/dev').AppConfig} */
module.exports = {
ignoredRouteFiles: ["**/.*"],
// appDirectory: "app",
// assetsBuildDirectory: "public/build",
// serverBuildPath: "build/index.js",
// publicPath: "/build/",
future: {
v2_errorBoundary: true,
v2_meta: true,
v2_normalizeFormMethod: true,
v2_routeConvention: true,
},
};

View File

@@ -1,2 +0,0 @@
/// <reference types="@remix-run/dev" />
/// <reference types="@remix-run/node" />

View File

@@ -1,22 +0,0 @@
{
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ES2019"],
"isolatedModules": true,
"esModuleInterop": true,
"jsx": "react-jsx",
"moduleResolution": "node",
"resolveJsonModule": true,
"target": "ES2019",
"strict": true,
"allowJs": true,
"forceConsistentCasingInFileNames": true,
"baseUrl": ".",
"paths": {
"~/*": ["./app/*"]
},
// Remix takes care of building everything in `remix build`.
"noEmit": true
}
}

1986
bench/package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@@ -1,13 +1,11 @@
{
"name": "bench",
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"mitata": "^0.1.6",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"mitata": "^0.1.6"
"@swc/core": "^1.2.133",
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7"
},
"scripts": {
"ffi": "cd ffi && bun run deps && bun run build && bun run bench",

View File

@@ -1,5 +1,5 @@
// to run this:
// NODE_ENV=production bun react-hello-world.jsx
// NODE_ENV=production bun --jsx-production react-hello-world.jsx
// Make sure you're using react-dom@18.3.0 or later.
// Currently that is available at react-dom@next (which is installed in this repository)

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const identity = x => x;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

View File

@@ -1,6 +1,5 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import * as assert from "assert";
bench("deepEqual", () => {

View File

@@ -1,9 +1,6 @@
// @runtime bun,node,deno
import { bench, run } from "./runner.mjs";
import process from "node:process";
import { Buffer } from "node:buffer";
import { bench, run } from "mitata";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";

BIN
bench/snippets/bun.lockb Executable file

Binary file not shown.

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import { readFileSync } from "fs";
import { allocUnsafe } from "bun";

View File

@@ -1,29 +0,0 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";
const scenarios = [
{ alg: "md5", digest: "hex" },
{ alg: "md5", digest: "base64" },
{ alg: "sha1", digest: "hex" },
{ alg: "sha1", digest: "base64" },
{ alg: "sha256", digest: "hex" },
{ alg: "sha256", digest: "base64" },
];
for (const { alg, digest } of scenarios) {
bench(`${alg}-${digest}`, () => {
createHash(alg).update(data).digest(digest);
});
if ("Bun" in globalThis) {
bench(`${alg}-${digest} (Bun.CryptoHasher)`, () => {
new Bun.CryptoHasher(alg).update(data).digest(digest);
});
}
}
run();

View File

@@ -1,15 +1,15 @@
// so it can run in environments without node module resolution
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import crypto from "node:crypto";
var foo = Buffer.allocUnsafe(512);
var foo = Buffer.allocUnsafe(16384);
foo.fill(123);
// if ("Bun" in globalThis) {
// const { CryptoHasher } = Bun;
// bench("Bun.CryptoHasher(sha512)", () => {
// var hasher = new CryptoHasher("sha512");
// bench("CryptoHasher Blake2b256", () => {
// var hasher = new CryptoHasher("blake2b256");
// hasher.update(foo);
// hasher.digest();
// });

View File

@@ -1,26 +0,0 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";
const scenarios = [
{ alg: "md5", digest: "hex" },
{ alg: "md5", digest: "base64" },
{ alg: "sha1", digest: "hex" },
{ alg: "sha1", digest: "base64" },
{ alg: "sha256", digest: "hex" },
{ alg: "sha256", digest: "base64" },
];
for (const { alg, digest } of scenarios) {
bench(`${alg}-${digest}`, () => {
const hasher = createHash(alg);
hasher.write(data);
hasher.end();
hasher.read();
});
}
run();

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import fastDeepEquals from "fast-deep-equal/es6/index";
// const Date = globalThis.Date;

View File

@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";

View File

@@ -1,5 +1,5 @@
import { dns } from "bun";
import { bench, run, group } from "./runner.mjs";
import { bench, run, group } from "mitata";
async function forEachBackend(name, fn) {
group(name, () => {

101
bench/snippets/emitter.mjs Normal file
View File

@@ -0,0 +1,101 @@
// **so this file can run in node**
import { createRequire } from "node:module";
const require = createRequire(import.meta.url);
// --
const EventEmitterNative = require("node:events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", event => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
var target = new EventTarget();
target.addEventListener("hello", event => {});
bench("EventTarget.dispatch", () => {
target.dispatchEvent(event);
});
var hey = new Event("hey");
bench("EventTarget.on x 10_000 (handler)", () => {
var handler = event => {};
target.addEventListener("hey", handler);
for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
target.removeEventListener("hey", handler);
});
await run();

View File

@@ -1,5 +1,5 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { group } from "mitata";
import { bench, run } from "mitata";
import { encode as htmlEntityEncode } from "html-entities";
import { escape as heEscape } from "he";

View File

@@ -1,5 +1,5 @@
import { viewSource, dlopen, CString, ptr, toBuffer, toArrayBuffer, FFIType, callback } from "bun:ffi";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
const types = {
returns_true: {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(

View File

@@ -1,57 +0,0 @@
import { bench, run } from "./runner.mjs";
var obj = {
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false,
},
"json": {
"strict": false,
"limit": "100kb",
},
"urlencoded": {
"extended": true,
"limit": "100kb",
boop: {
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false,
},
"json": {
"strict": false,
"limit": "100kb",
},
"urlencoded": {
"extended": true,
"limit": "100kb",
},
"cors": false,
},
},
},
"cors": false,
},
};
var big = JSON.stringify(obj);
bench("JSON.parse(obj)", () => {
globalThis.foo = JSON.parse(big);
});
bench("JSON.stringify(obj)", () => {
globalThis.bar = JSON.stringify(obj);
});
await run();

View File

@@ -1,65 +0,0 @@
// This is a stress test of some internals of how Bun handles the module.exports assignment.
// If it crashes or throws, this test fails.
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
return 42;
},
set() {
throw new Error("bad");
},
configurable: true,
});
if (module.exports !== 42) throw new Error("bad");
if (!Object.getOwnPropertyDescriptor(module, "exports").get) throw new Error("bad");
});
bench("Object.defineProperty(module.exports = {})", () => {
Object.defineProperty(module, "exports", {
value: { abc: 123 },
});
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
bench("module.exports = {}", () => {
module.exports = { abc: 123 };
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
run().then(() => {
module.exports = {
a: 1,
};
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
module.exports = function lol() {
return 42;
};
console.log(module.exports, module.exports());
queueMicrotask(() => {
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
});
});
});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];

View File

@@ -1,42 +0,0 @@
// @runtime node, bun
import { bench, run } from "./runner.mjs";
import * as vm from "node:vm";
const context = {
animal: "cat",
count: 2,
};
const script = new vm.Script("animal = 'hey'");
vm.createContext(context);
bench("vm.Script.runInContext", () => {
script.runInContext(context);
});
bench("vm.Script.runInThisContext", () => {
script.runInThisContext(context);
});
bench("vm.Script.runInNewContext", () => {
script.runInNewContext(context);
});
bench("vm.runInContext", () => {
vm.runInContext("animal = 'hey'", context);
});
bench("vm.runInNewContext", () => {
vm.runInNewContext("animal = 'hey'", context);
});
bench("vm.runInThisContext", () => {
vm.runInThisContext("animal = 'hey'", context);
});
bench("vm.createContext", () => {
vm.createContext({ yo: 1 });
});
await run();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;

View File

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
var val = 0;
bench("Object.values(literal)", () => {

View File

@@ -0,0 +1,7 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
},
"prettier": "../../.prettierrc.cjs"
}

View File

@@ -1,5 +1,5 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { group } from "mitata";
import { bench, run } from "mitata";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import { renderToReadableStream } from "react-dom/server.browser";
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";

View File

@@ -1,5 +1,5 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = (function () {
const text = "Hello World!";

View File

@@ -1,5 +1,5 @@
import { readdirSync } from "fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { argv } from "process";
const dir = argv.length > 2 ? argv[2] : "/tmp";

View File

@@ -1,10 +1,4 @@
import { realpathSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];
import { bench, run } from "./runner.mjs";
bench("realpathSync x " + count, () => {
for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");
});
await run();
for (let i = 0; i < count; i++) realpathSync(arg);

View File

@@ -1,244 +0,0 @@
// note: this isn't done yet
// we look for `// @runtime` in the file to determine which runtimes to run the benchmark in
import { spawnSync } from "bun";
import { readdirSync, readFileSync } from "node:fs";
import { Database } from "bun:sqlite";
import { extname, basename } from "path";
const exts = [".js", ".ts", ".mjs", ".tsx"];
const runtimes = {
bun: process.execPath,
node: process.env.NODE ?? Bun.which("node"),
deno: process.env.DENO ?? Bun.which("deno"),
};
if (process.env.BUN_ONLY) {
delete runtimes.node;
delete runtimes.deno;
}
function getEntry(sourceContents, file) {
const targetLineStart = sourceContents.indexOf("// @runtime ");
if (targetLineStart === -1) {
return;
}
const targetLineEnd = sourceContents.indexOf("\n", targetLineStart);
if (targetLineEnd === -1) {
return;
}
const targetLine = sourceContents.slice(targetLineStart, targetLineEnd);
const targets = targetLine
.slice("// @runtime ".length)
.split(/[,\s]+/gm)
.map(a => a.trim().toLowerCase())
.filter(Boolean)
.sort();
if (targets.length === 0) {
throw new TypeError("No targets specified in " + JSON.stringify(file) + "\n> " + JSON.stringify(targetLine) + "\n");
}
var cmds = {};
for (let target of targets) {
if (!(target in runtimes)) {
throw new TypeError(
"Unknown target " + JSON.stringify(target) + "\n> " + targetLine + "\n file:" + JSON.stringify(file),
);
}
switch (target) {
case "bun": {
if (!runtimes.bun) {
continue;
}
cmds.bun = [runtimes.bun, "run", file];
break;
}
case "node": {
if (!runtimes.node) {
continue;
}
cmds.node = [runtimes.node, file];
break;
}
case "deno": {
if (!runtimes.deno) {
continue;
}
cmds.deno = [runtimes.deno, "run", "-A", "--unstable", file];
break;
}
default: {
throw new Error("This should not be reached.");
break;
}
}
}
if (Object.keys(cmds).length === 0) {
return;
}
return cmds;
}
function scan() {
const queue = [];
for (let file of readdirSync(import.meta.dir)) {
if (!exts.includes(extname(file))) continue;
if (file.includes("runner")) continue;
const cmds = getEntry(readFileSync(file, "utf8"), file);
if (!cmds) continue;
queue.push({ file, cmds });
}
return queue;
}
const env = {
...process.env,
BENCHMARK_RUNNER: "1",
NODE_NO_WARNINGS: "1",
NODE_OPTIONS: "--no-warnings",
BUN_DEBUG_QUIET_LOGS: "1",
NO_COLOR: "1",
DISABLE_COLORS: "1",
};
function* run({ cmds, file }) {
const benchmarkID = basename(file)
.toLowerCase()
.replace(/\.m?js$/, "")
.replace(/\.tsx?$/, "")
.replace(".node", "")
.replace(".deno", "")
.replace(".bun", "");
// if benchmarkID doesn't contain only words, letters or numbers or dashes or underscore, throw
if (!/^[a-z0-9_-]+$/i.test(benchmarkID)) {
throw new Error(
"Benchmark files must only contain /a-zA-Z0-9-_/ " +
JSON.stringify(benchmarkID) +
" in file " +
JSON.stringify(file),
);
}
for (let runtime in cmds) {
const timestamp = Date.now();
const spawnStart = performance.now();
var { stdout, exitCode } = spawnSync({
cmd: cmds[runtime],
env,
stderr: "inherit",
stdout: "pipe",
});
const spawnElapsed = performance.now() - spawnStart;
stdout = stdout.toString();
try {
const json = JSON.parse(stdout.trim());
yield {
file: file,
benchmarkID,
result: json,
runtime: runtime,
timestamp,
elapsed: spawnElapsed,
runtimeVersion: (runtime === "bun"
? `${Bun.version}-${Bun.revision}`
: String(json?.runtime).split(" ").at(1)
).replace(/^v/, ""),
};
} catch (e) {
console.error("Failing file", file);
console.error(JSON.stringify(cmds[runtime]));
console.error(stdout.toString());
throw e;
}
if (exitCode !== 0) {
throw new Error("Non-zero exit code in file " + JSON.stringify(file) + ", runtime: " + JSON.stringify(runtime));
}
}
}
const db = Database.open(process.env.RUNNER_DB_PATH ?? `runs-${process.platform}-${process.arch}.db`);
db.run(`CREATE TABLE IF NOT EXISTS benchmarkResults (
id INTEGER PRIMARY KEY AUTOINCREMENT,
results TEXT NOT NULL
);`);
db.run(`
CREATE TABLE IF NOT EXISTS runs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
runtime TEXT NOT NULL,
runtimeVersion TEXT NOT NULL,
benchmarkID TEXT NOT NULL,
timestamp INTEGER NOT NULL,
elapsed REAL NOT NULL,
benchmarkResultID INTEGER NOT NULL
);
`);
db.run(`CREATE INDEX IF NOT EXISTS runs_benchmarkID ON runs (benchmarkID);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_timestamp ON runs (timestamp);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_runtime ON runs (runtime);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_runtimeVersion ON runs (runtimeVersion);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_benchmarkResultID ON runs (benchmarkResultID);`);
// TODO: finish this
for (let result of scan()) {
var prevBenchmarkID = null;
for (let {
runtime,
benchmarkID,
runtimeVersion,
timestamp,
elapsed,
file,
result: { benchmarks },
} of run(result)) {
if (prevBenchmarkID !== benchmarkID) {
console.log("\n" + `${benchmarkID}:`);
prevBenchmarkID = benchmarkID;
}
console.log(
" ",
`[${Math.round(elapsed)}ms]`,
"Executed",
JSON.stringify(benchmarkID),
"in",
runtime,
runtimeVersion,
"(" + file + ")",
);
const { id: benchmarkResultID } = db
.query(
`
INSERT INTO benchmarkResults (results) VALUES (?) RETURNING id
`,
)
.get(JSON.stringify(benchmarks));
db.run(
`
INSERT INTO runs (runtime, runtimeVersion, benchmarkID, timestamp, elapsed, benchmarkResultID) VALUES (
?, ?, ?, ?, ?, ?)`,
runtime,
runtimeVersion,
benchmarkID,
timestamp,
elapsed,
benchmarkResultID,
);
}
}

Some files were not shown because too many files have changed in this diff