Mirror of https://github.com/oven-sh/bun
Synced 2026-02-08 01:49:33 +00:00

Compare commits: bun-json ... dylan/gith
7 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 9c2a7c6b82 |  |
|  | 766f8ceebc |  |
|  | c03f7c998d |  |
|  | beb03c3c54 |  |
|  | 8846ae2454 |  |
|  | a4c379d316 |  |
|  | 25b080a05e |  |
28
.devcontainer/README.md
Normal file
@@ -0,0 +1,28 @@
# Bun's Dev Container

To get started, login to GitHub and clone bun's GitHub repo into `/build/bun`

# First time setup

```bash
gh auth login # if it fails to open a browser, use Personal Access Token instead
gh repo clone oven-sh/bun . -- --depth=1 --progress -j8
```

# Compile bun dependencies (zig is already compiled)

```bash
make devcontainer
```

# Build bun for development

```bash
make dev
```

# Run bun

```bash
bun-debug help
```
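For convenience, the same steps can be run as one block once a shell is open at `/build/bun`; this is just the README's own commands chained together, not an additional setup path.

```bash
# Same commands as the sections above, in order.
gh auth login                                            # falls back to a Personal Access Token if no browser opens
gh repo clone oven-sh/bun . -- --depth=1 --progress -j8
make devcontainer                                        # compile bun's dependencies (zig is already compiled)
make dev                                                 # build bun for development
bun-debug help                                           # run the freshly built debug binary
```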
70
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,70 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
{
  "name": "bun (Ubuntu)",

  // Sets the run context to one level up instead of the .devcontainer folder.
  "context": "..",
  "hostRequirements": { "memory": "16gb" },

  // Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
  "dockerFile": "../Dockerfile.devcontainer",

  // Set *default* container specific settings.json values on container create.
  "settings": {
    "terminal.integrated.shell.linux": "/bin/zsh",
    "zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
    "zig.zigPath": "/build/zig/zig",
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },

  // Add the IDs of extensions you want installed when the container is created.
  "extensions": [
    "AugusteRame.zls-vscode",
    "ms-vscode.cpptools",
    "/home/ubuntu/vscode-zig.vsix",
    "vadimcn.vscode-lldb",
    "esbenp.prettier-vscode",
    "xaver.clang-format"
  ],
  "postCreateCommand": "cd /build/bun; bash /build/getting-started.sh; cat /build/README.md",

  "build": {
    "target": "bun.devcontainer",
    "cacheFrom": ["ghcr.io/oven-sh/bun.devcontainer:latest"],
    "args": {
      "BUILDARCH": "${localEnv:DOCKER_BUILDARCH:amd64}",
      "--platform": "linux/${localEnv:DOCKER_BUILDARCH:amd64}",
      "--tag": "ghcr.io/oven-sh/bun.devcontainer:latest"
    }
  },
  "runArgs": [
    "--ulimit",
    "memlock=-1:-1",
    "--ulimit",
    "nofile=65536:65536",
    "--cap-add=SYS_PTRACE",
    "--security-opt",
    "seccomp=unconfined"
  ],
  "workspaceMount": "source=bun,target=/build/bun,type=volume",
  "workspaceFolder": "/build/bun",
  "mounts": [
    "source=bun-install,target=/home/ubuntu/.bun,type=volume",
    "source=bun-config,target=/home/ubuntu/.config,type=volume"
  ],

  // Use 'forwardPorts' to make a list of ports inside the container available locally.
  "forwardPorts": [3000, 8081, 8080]

  // Uncomment the next line to run commands after the container is created - for example installing curl.
  // "postCreateCommand": "apt-get update && apt-get install -y curl",

  // Uncomment when using a ptrace-based debugger like C++, Go, and Rust

  // Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
  // "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],

  // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
  // "remoteUser": "vscode"
}
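This JSON is normally consumed by the VS Code Dev Containers extension. As a rough sketch, the same container can also be brought up from a terminal with the Dev Containers CLI; the commands below assume `@devcontainers/cli` and Docker are installed and are not part of this repository's tooling.

```bash
# Sketch: build and start the dev container defined in .devcontainer/, then open a zsh shell in it.
devcontainer up --workspace-folder .
devcontainer exec --workspace-folder . zsh
```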
61
.devcontainer/limits.conf
Normal file
@@ -0,0 +1,61 @@
# /etc/security/limits.conf
#
#Each line describes a limit for a user in the form:
#
#<domain> <type> <item> <value>
#
#Where:
#<domain> can be:
# - a user name
# - a group name, with @group syntax
# - the wildcard *, for default entry
# - the wildcard %, can be also used with %group syntax,
#   for maxlogin limit
# - NOTE: group and wildcard limits are not applied to root.
#   To apply a limit to the root user, <domain> must be
#   the literal username root.
#
#<type> can have the two values:
# - "soft" for enforcing the soft limits
# - "hard" for enforcing hard limits
#
#<item> can be one of the following:
# - core - limits the core file size (KB)
# - data - max data size (KB)
# - fsize - maximum filesize (KB)
# - memlock - max locked-in-memory address space (KB)
# - nofile - max number of open file descriptors
# - rss - max resident set size (KB)
# - stack - max stack size (KB)
# - cpu - max CPU time (MIN)
# - nproc - max number of processes
# - as - address space limit (KB)
# - maxlogins - max number of logins for this user
# - maxsyslogins - max number of logins on the system
# - priority - the priority to run user process with
# - locks - max number of file locks the user can hold
# - sigpending - max number of pending signals
# - msgqueue - max memory used by POSIX message queues (bytes)
# - nice - max nice priority allowed to raise to values: [-20, 19]
# - rtprio - max realtime priority
# - chroot - change root to directory (Debian-specific)
#
#<domain> <type> <item> <value>
#

* soft memlock 33554432
* hard memlock 33554432
* soft nofile 33554432
* hard nofile 33554432

#* soft core 0
#root hard core 100000
#* hard rss 10000
#@student hard nproc 20
#@faculty soft nproc 20
#@faculty hard nproc 50
#ftp hard nproc 0
#ftp - chroot /ftp
#@student - maxlogins 4

# End of file
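A quick way to confirm that the memlock/nofile values above (and the matching `--ulimit` flags in devcontainer.json) actually reach the container is to inspect the shell's limits; a minimal check, assuming you are inside the running container:

```bash
# Print the effective limits for the current shell and compare them with the
# memlock/nofile values configured above (runArgs and limits.conf may differ).
ulimit -l   # max locked-in-memory size
ulimit -n   # max open file descriptors
```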
454
.devcontainer/scripts/common-debian.sh
Normal file
@@ -0,0 +1,454 @@
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
|
||||
|
||||
set -e
|
||||
|
||||
INSTALL_ZSH=${1:-"true"}
|
||||
USERNAME=${2:-"automatic"}
|
||||
USER_UID=${3:-"automatic"}
|
||||
USER_GID=${4:-"automatic"}
|
||||
UPGRADE_PACKAGES=${5:-"true"}
|
||||
INSTALL_OH_MYS=${6:-"true"}
|
||||
ADD_NON_FREE_PACKAGES=${7:-"false"}
|
||||
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
|
||||
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
|
||||
rm -f /etc/profile.d/00-restore-env.sh
|
||||
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
|
||||
chmod +x /etc/profile.d/00-restore-env.sh
|
||||
|
||||
# If in automatic mode, determine if a user already exists, if not use vscode
|
||||
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
|
||||
USERNAME=""
|
||||
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
|
||||
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
|
||||
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
|
||||
USERNAME=${CURRENT_USER}
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ "${USERNAME}" = "" ]; then
|
||||
USERNAME=vscode
|
||||
fi
|
||||
elif [ "${USERNAME}" = "none" ]; then
|
||||
USERNAME=root
|
||||
USER_UID=0
|
||||
USER_GID=0
|
||||
fi
|
||||
|
||||
# Load markers to see which steps have already run
|
||||
if [ -f "${MARKER_FILE}" ]; then
|
||||
echo "Marker file found:"
|
||||
cat "${MARKER_FILE}"
|
||||
source "${MARKER_FILE}"
|
||||
fi
|
||||
|
||||
# Ensure apt is in non-interactive to avoid prompts
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Function to call apt-get if needed
|
||||
apt_get_update_if_needed()
|
||||
{
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
|
||||
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
|
||||
|
||||
package_list="apt-utils \
|
||||
openssh-client \
|
||||
gnupg2 \
|
||||
dirmngr \
|
||||
iproute2 \
|
||||
procps \
|
||||
lsof \
|
||||
htop \
|
||||
net-tools \
|
||||
psmisc \
|
||||
curl \
|
||||
wget \
|
||||
rsync \
|
||||
ca-certificates \
|
||||
unzip \
|
||||
zip \
|
||||
nano \
|
||||
vim-tiny \
|
||||
less \
|
||||
jq \
|
||||
lsb-release \
|
||||
apt-transport-https \
|
||||
dialog \
|
||||
libc6 \
|
||||
libgcc1 \
|
||||
libkrb5-3 \
|
||||
libgssapi-krb5-2 \
|
||||
libicu[0-9][0-9] \
|
||||
liblttng-ust[0-9] \
|
||||
libstdc++6 \
|
||||
zlib1g \
|
||||
locales \
|
||||
sudo \
|
||||
ncdu \
|
||||
man-db \
|
||||
strace \
|
||||
manpages \
|
||||
manpages-dev \
|
||||
init-system-helpers"
|
||||
|
||||
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
|
||||
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
|
||||
# Bring in variables from /etc/os-release like VERSION_CODENAME
|
||||
. /etc/os-release
|
||||
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
|
||||
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
|
||||
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
package_list="${package_list} manpages-posix manpages-posix-dev"
|
||||
else
|
||||
apt_get_update_if_needed
|
||||
fi
|
||||
|
||||
# Install libssl1.1 if available
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
|
||||
package_list="${package_list} libssl1.1"
|
||||
fi
|
||||
|
||||
# Install appropriate version of libssl1.0.x if available
|
||||
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
|
||||
if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
|
||||
# Debian 9
|
||||
package_list="${package_list} libssl1.0.2"
|
||||
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
|
||||
# Ubuntu 18.04, 16.04, earlier
|
||||
package_list="${package_list} libssl1.0.0"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Packages to verify are installed: ${package_list}"
|
||||
apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
|
||||
|
||||
# Install git if not already installed (may be more recent than distro version)
|
||||
if ! type git > /dev/null 2>&1; then
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
PACKAGES_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Get to latest versions of all packages
|
||||
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y upgrade --no-install-recommends
|
||||
apt-get autoremove -y
|
||||
fi
|
||||
|
||||
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
|
||||
# Common need for both applications and things like the agnoster ZSH theme.
|
||||
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
|
||||
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
|
||||
locale-gen
|
||||
LOCALE_ALREADY_SET="true"
|
||||
fi
|
||||
|
||||
# Create or update a non-root user to match UID/GID.
|
||||
group_name="${USERNAME}"
|
||||
if id -u ${USERNAME} > /dev/null 2>&1; then
|
||||
# User exists, update if needed
|
||||
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
|
||||
group_name="$(id -gn $USERNAME)"
|
||||
groupmod --gid $USER_GID ${group_name}
|
||||
usermod --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
|
||||
usermod --uid $USER_UID $USERNAME
|
||||
fi
|
||||
else
|
||||
# Create user
|
||||
if [ "${USER_GID}" = "automatic" ]; then
|
||||
groupadd $USERNAME
|
||||
else
|
||||
groupadd --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" = "automatic" ]; then
|
||||
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
|
||||
else
|
||||
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
|
||||
fi
|
||||
fi
|
||||
|
||||
# Add sudo support for non-root user
|
||||
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
|
||||
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
|
||||
chmod 0440 /etc/sudoers.d/$USERNAME
|
||||
EXISTING_NON_ROOT_USER="${USERNAME}"
|
||||
fi
|
||||
|
||||
# ** Shell customization section **
|
||||
if [ "${USERNAME}" = "root" ]; then
|
||||
user_rc_path="/root"
|
||||
else
|
||||
user_rc_path="/home/${USERNAME}"
|
||||
fi
|
||||
|
||||
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
|
||||
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
|
||||
fi
|
||||
|
||||
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
|
||||
cp /etc/skel/.profile "${user_rc_path}/.profile"
|
||||
fi
|
||||
|
||||
# .bashrc/.zshrc snippet
|
||||
rc_snippet="$(cat << 'EOF'
|
||||
|
||||
if [ -z "${USER}" ]; then export USER=$(whoami); fi
|
||||
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
|
||||
|
||||
# Display optional first run image specific notice if configured and terminal is interactive
|
||||
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
|
||||
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
|
||||
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
|
||||
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
|
||||
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
|
||||
fi
|
||||
mkdir -p "$HOME/.config/vscode-dev-containers"
|
||||
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
|
||||
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
|
||||
fi
|
||||
|
||||
# Set the default git editor if not already set
|
||||
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
|
||||
if [ "${TERM_PROGRAM}" = "vscode" ]; then
|
||||
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
|
||||
export GIT_EDITOR="code-insiders --wait"
|
||||
else
|
||||
export GIT_EDITOR="code --wait"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
# code shim: falls back to code-insiders if code is not available
|
||||
cat << 'EOF' > /usr/local/bin/code
|
||||
#!/bin/sh
|
||||
|
||||
get_in_path_except_current() {
|
||||
which -a "$1" | grep -A1 "$0" | grep -v "$0"
|
||||
}
|
||||
|
||||
code="$(get_in_path_except_current code)"
|
||||
|
||||
if [ -n "$code" ]; then
|
||||
exec "$code" "$@"
|
||||
elif [ "$(command -v code-insiders)" ]; then
|
||||
exec code-insiders "$@"
|
||||
else
|
||||
echo "code or code-insiders is not installed" >&2
|
||||
exit 127
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/code
|
||||
|
||||
# systemctl shim - tells people to use 'service' if systemd is not running
|
||||
cat << 'EOF' > /usr/local/bin/systemctl
|
||||
#!/bin/sh
|
||||
set -e
|
||||
if [ -d "/run/systemd/system" ]; then
|
||||
exec /bin/systemctl "$@"
|
||||
else
|
||||
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/systemctl
|
||||
|
||||
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
|
||||
codespaces_bash="$(cat \
|
||||
<<'EOF'
|
||||
|
||||
# Codespaces bash prompt theme
|
||||
__bash_prompt() {
|
||||
local userpart='`export XIT=$? \
|
||||
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
|
||||
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
|
||||
local gitbranch='`\
|
||||
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
|
||||
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
|
||||
if [ "${BRANCH}" != "" ]; then \
|
||||
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
|
||||
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
|
||||
echo -n " \[\033[1;33m\]✗"; \
|
||||
fi \
|
||||
&& echo -n "\[\033[0;36m\]) "; \
|
||||
fi; \
|
||||
fi`'
|
||||
local lightblue='\[\033[1;34m\]'
|
||||
local removecolor='\[\033[0m\]'
|
||||
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
|
||||
unset -f __bash_prompt
|
||||
}
|
||||
__bash_prompt
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
codespaces_zsh="$(cat \
|
||||
<<'EOF'
|
||||
# Codespaces zsh prompt theme
|
||||
__zsh_prompt() {
|
||||
local prompt_username
|
||||
if [ ! -z "${GITHUB_USER}" ]; then
|
||||
prompt_username="@${GITHUB_USER}"
|
||||
else
|
||||
prompt_username="%n"
|
||||
fi
|
||||
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
|
||||
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
|
||||
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
|
||||
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
|
||||
unset -f __zsh_prompt
|
||||
}
|
||||
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
|
||||
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
|
||||
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
|
||||
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
|
||||
__zsh_prompt
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
# Add RC snippet and custom bash prompt
|
||||
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >> /etc/bash.bashrc
|
||||
echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
echo "${codespaces_bash}" >> "/root/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
|
||||
fi
|
||||
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
|
||||
RC_SNIPPET_ALREADY_ADDED="true"
|
||||
fi
|
||||
|
||||
# Optionally install and configure zsh and Oh My Zsh!
|
||||
if [ "${INSTALL_ZSH}" = "true" ]; then
|
||||
if ! type zsh > /dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get install -y zsh
|
||||
fi
|
||||
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >> /etc/zsh/zshrc
|
||||
ZSH_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Adapted, simplified inline Oh My Zsh! install steps that add and default to a codespaces theme.
|
||||
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
|
||||
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
|
||||
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
|
||||
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
|
||||
user_rc_file="${user_rc_path}/.zshrc"
|
||||
umask g-w,o-w
|
||||
mkdir -p ${oh_my_install_dir}
|
||||
git clone --depth=1 \
|
||||
-c core.eol=lf \
|
||||
-c core.autocrlf=false \
|
||||
-c fsck.zeroPaddedFilemode=ignore \
|
||||
-c fetch.fsck.zeroPaddedFilemode=ignore \
|
||||
-c receive.fsck.zeroPaddedFilemode=ignore \
|
||||
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
|
||||
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
|
||||
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
|
||||
|
||||
mkdir -p ${oh_my_install_dir}/custom/themes
|
||||
echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
|
||||
# Shrink git while still enabling updates
|
||||
cd "${oh_my_install_dir}"
|
||||
git repack -a -d -f --depth=1 --window=1
|
||||
# Copy to non-root user if one is specified
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
|
||||
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Persist image metadata info and an info script if meta.env is found in the same directory
|
||||
meta_info_script="$(cat << 'EOF'
|
||||
#!/bin/sh
|
||||
. /usr/local/etc/vscode-dev-containers/meta.env
|
||||
|
||||
# Minimal output
|
||||
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
|
||||
echo "${VERSION}"
|
||||
exit 0
|
||||
elif [ "$1" = "release" ]; then
|
||||
echo "${GIT_REPOSITORY_RELEASE}"
|
||||
exit 0
|
||||
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
|
||||
echo "${CONTENTS_URL}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
#Full output
|
||||
echo
|
||||
echo "Development container image information"
|
||||
echo
|
||||
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
|
||||
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
|
||||
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
|
||||
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
|
||||
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
|
||||
echo
|
||||
EOF
|
||||
)"
|
||||
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
|
||||
mkdir -p /usr/local/etc/vscode-dev-containers/
|
||||
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
|
||||
echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
|
||||
chmod +x /usr/local/bin/devcontainer-info
|
||||
fi
|
||||
|
||||
# Write marker file
|
||||
mkdir -p "$(dirname "${MARKER_FILE}")"
|
||||
echo -e "\
|
||||
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
|
||||
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
|
||||
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
|
||||
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
|
||||
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
|
||||
|
||||
echo "Done!"
|
||||
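For reference, the script's own `Syntax:` comment documents seven positional arguments; a minimal sketch of an invocation (argument values are illustrative, not taken from bun's Dockerfile):

```bash
# [install zsh] [username] [uid] [gid] [upgrade packages] [install Oh My Zsh!] [add non-free packages]
sudo bash .devcontainer/scripts/common-debian.sh true ubuntu 1000 1000 true true false
```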
16
.devcontainer/scripts/getting-started.sh
Normal file
@@ -0,0 +1,16 @@
#!/bin/bash

echo "To get started, login to GitHub and clone bun's GitHub repo into /build/bun"
echo "If it fails to open a browser, login with a Personal Access Token instead"
echo "# First time setup"
echo "gh auth login"
echo "gh repo clone oven-sh/bun . -- --depth=1 --progress -j8"
echo ""
echo "# Compile bun dependencies (zig is already compiled)"
echo "make devcontainer"
echo ""
echo "# Build bun for development"
echo "make dev"
echo ""
echo "# Run bun"
echo "bun-debug"
207
.devcontainer/scripts/github.sh
Normal file
@@ -0,0 +1,207 @@
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./github-debian.sh [version]
|
||||
|
||||
CLI_VERSION=${1:-"latest"}
|
||||
|
||||
GITHUB_CLI_ARCHIVE_GPG_KEY=C99B11DEB97541F0
|
||||
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com:80
|
||||
keyserver hkps://keys.openpgp.org
|
||||
keyserver hkp://keyserver.pgp.com"
|
||||
|
||||
set -e
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get central common setting
|
||||
get_common_setting() {
|
||||
if [ "${common_settings_file_loaded}" != "true" ]; then
|
||||
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" -o /tmp/vsdc-settings.env 2>/dev/null || echo "Could not download settings file. Skipping."
|
||||
common_settings_file_loaded=true
|
||||
fi
|
||||
if [ -f "/tmp/vsdc-settings.env" ]; then
|
||||
local multi_line=""
|
||||
if [ "$2" = "true" ]; then multi_line="-z"; fi
|
||||
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
|
||||
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
|
||||
fi
|
||||
echo "$1=${!1}"
|
||||
}
|
||||
|
||||
# Import the specified key from the variable whose name is passed in as $1
|
||||
receive_gpg_keys() {
|
||||
get_common_setting $1
|
||||
local keys=${!1}
|
||||
get_common_setting GPG_KEY_SERVERS true
|
||||
|
||||
# Use a temporary location for gpg keys to avoid polluting image
|
||||
export GNUPGHOME="/tmp/tmp-gnupg"
|
||||
mkdir -p ${GNUPGHOME}
|
||||
chmod 700 ${GNUPGHOME}
|
||||
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
|
||||
# GPG key download sometimes fails for some reason and retrying fixes it.
|
||||
local retry_count=0
|
||||
local gpg_ok="false"
|
||||
set +e
|
||||
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
|
||||
echo "(*) Downloading GPG key..."
|
||||
(echo "${keys}" | xargs -n 1 gpg --recv-keys) 2>&1 && gpg_ok="true"
|
||||
if [ "${gpg_ok}" != "true" ]; then
|
||||
echo "(*) Failed getting key, retring in 10s..."
|
||||
((retry_count++))
|
||||
sleep 10s
|
||||
fi
|
||||
done
|
||||
set -e
|
||||
if [ "${gpg_ok}" = "false" ]; then
|
||||
echo "(!) Failed to get gpg key."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Figure out the correct version if a three-part version number is not passed
|
||||
find_version_from_git_tags() {
|
||||
local variable_name=$1
|
||||
local requested_version=${!variable_name}
|
||||
if [ "${requested_version}" = "none" ]; then return; fi
|
||||
local repository=$2
|
||||
local prefix=${3:-"tags/v"}
|
||||
local separator=${4:-"."}
|
||||
local last_part_optional=${5:-"false"}
|
||||
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
|
||||
local escaped_separator=${separator//./\\.}
|
||||
local last_part
|
||||
if [ "${last_part_optional}" = "true" ]; then
|
||||
last_part="(${escaped_separator}[0-9]+)?"
|
||||
else
|
||||
last_part="${escaped_separator}[0-9]+"
|
||||
fi
|
||||
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
|
||||
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
|
||||
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
|
||||
else
|
||||
set +e
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
|
||||
set -e
|
||||
fi
|
||||
fi
|
||||
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" >/dev/null 2>&1; then
|
||||
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "${variable_name}=${!variable_name}"
|
||||
}
|
||||
|
||||
# Import the specified key from the variable whose name is passed in as $1
|
||||
receive_gpg_keys() {
|
||||
get_common_setting $1
|
||||
local keys=${!1}
|
||||
get_common_setting GPG_KEY_SERVERS true
|
||||
local keyring_args=""
|
||||
if [ ! -z "$2" ]; then
|
||||
keyring_args="--no-default-keyring --keyring $2"
|
||||
fi
|
||||
|
||||
# Use a temporary location for gpg keys to avoid polluting image
|
||||
export GNUPGHOME="/tmp/tmp-gnupg"
|
||||
mkdir -p ${GNUPGHOME}
|
||||
chmod 700 ${GNUPGHOME}
|
||||
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
|
||||
# GPG key download sometimes fails for some reason and retrying fixes it.
|
||||
local retry_count=0
|
||||
local gpg_ok="false"
|
||||
set +e
|
||||
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
|
||||
echo "(*) Downloading GPG key..."
|
||||
(echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys) 2>&1 && gpg_ok="true"
|
||||
if [ "${gpg_ok}" != "true" ]; then
|
||||
echo "(*) Failed getting key, retring in 10s..."
|
||||
((retry_count++))
|
||||
sleep 10s
|
||||
fi
|
||||
done
|
||||
set -e
|
||||
if [ "${gpg_ok}" = "false" ]; then
|
||||
echo "(!) Failed to get gpg key."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to run apt-get if needed
|
||||
apt_get_update_if_needed() {
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Checks if packages are installed and installs them if not
|
||||
check_packages() {
|
||||
if ! dpkg -s "$@" >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
# Fall back on direct download if no apt package exists
|
||||
# Fetches .deb file to be installed with dpkg
|
||||
install_deb_using_github() {
|
||||
check_packages wget
|
||||
arch=$(dpkg --print-architecture)
|
||||
|
||||
find_version_from_git_tags CLI_VERSION https://github.com/cli/cli
|
||||
cli_filename="gh_${CLI_VERSION}_linux_${arch}.deb"
|
||||
|
||||
mkdir -p /tmp/ghcli
|
||||
pushd /tmp/ghcli
|
||||
wget https://github.com/cli/cli/releases/download/v${CLI_VERSION}/${cli_filename}
|
||||
dpkg -i /tmp/ghcli/${cli_filename}
|
||||
popd
|
||||
rm -rf /tmp/ghcli
|
||||
}
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install curl, apt-transport-https, gpg, dirmngr, and git if missing
|
||||
check_packages curl ca-certificates apt-transport-https dirmngr gnupg2
|
||||
if ! type git >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
# Soft version matching
|
||||
if [ "${CLI_VERSION}" != "latest" ] && [ "${CLI_VERSION}" != "lts" ] && [ "${CLI_VERSION}" != "stable" ]; then
|
||||
find_version_from_git_tags CLI_VERSION "https://github.com/cli/cli"
|
||||
version_suffix="=${CLI_VERSION}"
|
||||
else
|
||||
version_suffix=""
|
||||
fi
|
||||
|
||||
# Install the GitHub CLI
|
||||
echo "Downloading github CLI..."
|
||||
|
||||
install_deb_using_github
|
||||
|
||||
# Method below does not work until cli/cli#6175 is fixed
|
||||
# # Import key safely (new method rather than deprecated apt-key approach) and install
|
||||
# . /etc/os-release
|
||||
# receive_gpg_keys GITHUB_CLI_ARCHIVE_GPG_KEY /usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||
# echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" >/etc/apt/sources.list.d/github-cli.list
|
||||
# apt-get update
|
||||
# apt-get -y install "gh${version_suffix}"
|
||||
# rm -rf "/tmp/gh/gnupg"
|
||||
|
||||
echo "Done!"
|
||||
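Per the `Syntax: ./github-debian.sh [version]` comment at the top of this script, it takes a single optional version argument; a sketch of both forms (the pinned version number is illustrative):

```bash
# Install the latest GitHub CLI release.
sudo bash .devcontainer/scripts/github.sh latest
# Or pin a specific three-part version (illustrative value).
# sudo bash .devcontainer/scripts/github.sh 2.32.1
```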
7
.devcontainer/scripts/nice.sh
Normal file
@@ -0,0 +1,7 @@
#!/bin/bash

chsh -s $(which zsh)
sh -c "$(curl -fsSL https://starship.rs/install.sh) -- --platform linux_musl" -- --yes
echo "eval \"$(starship init zsh)\"" >>~/.zshrc

curl https://github.com/Jarred-Sumner/vscode-zig/releases/download/march18/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
8
.devcontainer/scripts/zig-env.sh
Normal file
@@ -0,0 +1,8 @@
#!/bin/bash

curl -L https://github.com/zigtools/zls-vscode/releases/download/1.1.6/zls-vscode-1.1.6.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/zigtools/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git checkout aabdb0c6ecb3c9a47feff2c2bfb9be4e95adf723
git submodule update --init --recursive --progress --depth=1
zig build -Drelease-fast
9
.devcontainer/workspace.code-workspace
Normal file
@@ -0,0 +1,9 @@
{
  "folders": [
    {
      // Source code
      "name": "bun",
      "path": "bun"
    },
  ]
}
9
.devcontainer/zls.json
Normal file
@@ -0,0 +1,9 @@
{
  "zig_exe_path": "/build/zig/zig",
  "enable_snippets": true,
  "warn_style": false,
  "enable_semantic_tokens": true,
  "operator_completions": true,
  "include_at_in_builtins": false,
  "max_detail_length": 1048576
}
23
.gitattributes
vendored
@@ -8,26 +8,3 @@ src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored

src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated

src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated

src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated

src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated

src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated

src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated

docs/**/* linguist-documentation
@@ -1,6 +1,6 @@
name: 📗 Documentation Issue
name: 📗 Documentation Request
description: Tell us if there is missing or incorrect documentation
labels: [docs]
labels: [documentation]
body:
  - type: markdown
    attributes:
62
.github/pull_request_template.md
vendored
@@ -1,62 +0,0 @@
|
||||
### What does this PR do?
|
||||
|
||||
<!-- **Please explain what your changes do**, example: -->
|
||||
|
||||
<!--
|
||||
|
||||
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
|
||||
|
||||
-->
|
||||
|
||||
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
|
||||
- [ ] Code changes
|
||||
|
||||
### How did you verify your code works?
|
||||
|
||||
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
|
||||
|
||||
<!-- I wrote automated tests -->
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I ran `make js` and committed the transpiled changes
|
||||
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I or my editor ran `zig fmt` on the changed files
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
|
||||
- [ ] I added TypeScript types for the new methods, getters, or setters
|
||||
-->
|
||||
|
||||
<!-- If dependencies in tests changed:
|
||||
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If functions were added to exports.zig or bindings.zig
|
||||
|
||||
- [ ] I ran `make headers` to regenerate the C header file
|
||||
|
||||
-->
|
||||
|
||||
<!-- If \*.classes.ts files were added or changed:
|
||||
|
||||
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
- [ ] I added a test that imports the module
|
||||
- [ ] I added a test that require() the module
|
||||
-->
|
||||
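The checklist in this template references several repository commands; a rough summary of the ones most PRs touch, collected in one place (run only those relevant to your change; this is a convenience sketch, not an official script):

```bash
make js        # re-transpile JS/TS builtins and commit the generated output
bun fmt        # run Prettier on changed JS/TS files
zig fmt src    # format changed Zig files (path is illustrative)
make codegen   # regenerate C++/Zig code after *.classes.ts changes
make headers   # regenerate the C header file after exports.zig/bindings.zig changes
```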
18
.github/workflows/bun-deploy-site.yml
vendored
@@ -1,18 +0,0 @@
|
||||
# redeploy Vercel site when a file in `docs` changes
|
||||
# using VERCEL_DEPLOY_HOOK environment variable
|
||||
|
||||
name: Deploy site
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "docs/**"
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Deploy site
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- name: Trigger Vercel build
|
||||
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}
|
||||
47
.github/workflows/bun-dockerhub.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
name: bun-dockerhub
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- dockerhub/Dockerfile
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
paths:
|
||||
- dockerhub/Dockerfile
|
||||
branches:
|
||||
- main
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
- name: Collect metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
${{ secrets.DOCKERHUB_USERNAME }}/bun
|
||||
tags: |
|
||||
type=match,pattern=bun-v(\d.\d.\d),group=1
|
||||
type=match,pattern=bun-v(\d.\d),group=1
|
||||
type=match,pattern=bun-v(\d),group=1
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
- name: Login to DockerHub
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build image
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./dockerhub
|
||||
push: ${{ github.event_name == 'release' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
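For clarity, the `type=match` patterns in the metadata step above extract progressively shorter version tags from the release tag; a sketch of what they would resolve to for a hypothetical release tagged `bun-v1.0.3`:

```bash
# Illustrative mapping only; actual tags come from docker/metadata-action at build time.
# bun-v1.0.3  ->  1.0.3   (pattern bun-v(\d.\d.\d), group 1)
# bun-v1.0.3  ->  1.0     (pattern bun-v(\d.\d),   group 1)
# bun-v1.0.3  ->  1       (pattern bun-v(\d),      group 1)
```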
50
.github/workflows/bun-ecosystem-test.yml
vendored
@@ -1,50 +0,0 @@
|
||||
name: bun-ecosystem-test
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 15 * * *" # every day at 7am PST
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: "The version of Bun to run"
|
||||
required: true
|
||||
default: "canary"
|
||||
type: string
|
||||
jobs:
|
||||
test:
|
||||
name: ${{ matrix.tag }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 10
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
tag: linux-x64
|
||||
url: linux/x64?avx2=true
|
||||
- os: ubuntu-latest
|
||||
tag: linux-x64-baseline
|
||||
url: linux/x64?baseline=true
|
||||
# FIXME: runner fails with "No tests found"?
|
||||
#- os: macos-latest
|
||||
# tag: darwin-x64
|
||||
# url: darwin/x64?avx2=true
|
||||
- os: macos-latest
|
||||
tag: darwin-x64-baseline
|
||||
url: darwin/x64?baseline=true
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: Bhacaz/checkout-files@v2
|
||||
with:
|
||||
files: packages/bun-internal-test
|
||||
- id: setup
|
||||
name: Setup
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
|
||||
- id: test
|
||||
name: Test
|
||||
working-directory: packages/bun-internal-test
|
||||
run: bun run test:ecosystem
|
||||
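Besides the nightly cron, the `workflow_dispatch` trigger above accepts a `version` input; a sketch of kicking it off from the GitHub CLI (assumes `gh` is authenticated with access to the repository):

```bash
gh workflow run bun-ecosystem-test.yml --repo oven-sh/bun -f version=canary
```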
30
.github/workflows/bun-homebrew.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: bun-homebrew
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
- edited
|
||||
jobs:
|
||||
homebrew:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'oven-sh' && github.event.release.published_at != null
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: oven-sh/homebrew-bun
|
||||
token: ${{ env.HOMEBREW_TOKEN }}
|
||||
- id: setup-ruby
|
||||
name: Setup Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: '2.6'
|
||||
- id: update-tap
|
||||
name: Update Tap
|
||||
run: ruby scripts/release.rb "${{ github.event.release.tag_name }}"
|
||||
- id: commit-tap
|
||||
name: Commit Tap
|
||||
uses: stefanzweifel/git-auto-commit-action@v4
|
||||
with:
|
||||
commit_message: Release ${{ github.event.release.tag_name }}
|
||||
136
.github/workflows/bun-linux-aarch64.yml
vendored
@@ -1,136 +0,0 @@
|
||||
name: bun-linux
|
||||
|
||||
concurrency:
|
||||
group: bun-linux-aarch64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
name: ${{matrix.tag}}
|
||||
runs-on: ${{matrix.runner}}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: native
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
build_arch: arm64
|
||||
runner: linux-arm64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-linux-arm64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-arm64-lto"
|
||||
build_machine_arch: aarch64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- run: |
|
||||
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
build-args: |
|
||||
ARCH=${{matrix.arch}}
|
||||
BUILDARCH=${{matrix.build_arch}}
|
||||
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
|
||||
CPU_TARGET=${{matrix.cpu}}
|
||||
WEBKIT_URL=${{matrix.webkit_url}}
|
||||
GIT_SHA=${{github.sha}}
|
||||
WEBKIT_BASENAME=${{matrix.webkit_basename}}
|
||||
platforms: linux/${{matrix.build_arch}}
|
||||
target: artifact
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Zip
|
||||
run: |
|
||||
# if zip is not found
|
||||
if [ ! -x "$(command -v zip)" ]; then
|
||||
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
|
||||
fi
|
||||
|
||||
if [ ! -x "$(command -v strip)" ]; then
|
||||
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
|
||||
fi
|
||||
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir bun-${{matrix.tag}}-profile
|
||||
mkdir bun-${{matrix.tag}}
|
||||
|
||||
strip bun
|
||||
|
||||
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
|
||||
mv bun bun-${{matrix.tag}}/bun
|
||||
|
||||
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
|
||||
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-obj-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-obj
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-dependencies
|
||||
path: ${{runner.temp}}/release/bun-dependencies
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
|
||||
119
.github/workflows/bun-linux-build.yml
vendored
@@ -1,9 +1,4 @@
|
||||
name: bun-linux
|
||||
|
||||
concurrency:
|
||||
group: bun-linux-build-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
@@ -35,28 +30,34 @@ jobs:
|
||||
linux:
|
||||
name: ${{matrix.tag}}
|
||||
runs-on: ${{matrix.runner}}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- cpu: haswell
|
||||
tag: linux-x64
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
runner: linux-amd64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
- cpu: nehalem
|
||||
- cpu: westmere
|
||||
tag: linux-x64-baseline
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
runner: linux-amd64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
- cpu: native
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
build_arch: arm64
|
||||
runner: linux-arm64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-arm64-lto"
|
||||
build_machine_arch: aarch64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -127,20 +128,10 @@ jobs:
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-obj-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-obj
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-dependencies
|
||||
path: ${{runner.temp}}/release/bun-dependencies
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
@@ -152,81 +143,11 @@ jobs:
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
|
||||
|
||||
linux-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ubuntu-latest
|
||||
needs: [linux]
|
||||
if: github.event_name == 'pull_request'
|
||||
timeout-minutes: 20
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: linux-x64
|
||||
- tag: linux-x64-baseline
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
name: bun-obj-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-obj
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip bun-${{matrix.tag}}.zip
|
||||
cd bun-${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
name: ${{matrix.tag}}-dependencies
|
||||
path: ${{runner.temp}}/release/bun-dependencies
|
||||
|
||||
126
.github/workflows/bun-mac-aarch64.yml
vendored
@@ -1,9 +1,4 @@
|
||||
name: bun-macOS-aarch64
|
||||
|
||||
concurrency:
|
||||
group: bun-macOS-aarch64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
@@ -32,12 +27,11 @@ on:
|
||||
jobs:
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
runs-on: zig-object
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64-baseline
|
||||
# - cpu: haswell
|
||||
@@ -106,18 +100,17 @@ jobs:
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: haswell
|
||||
@@ -126,16 +119,16 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: haswell
|
||||
@@ -144,7 +137,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: native
|
||||
@@ -152,7 +145,7 @@ jobs:
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
dependencies: true
|
||||
compile_obj: true
|
||||
@@ -173,9 +166,9 @@ jobs:
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
@@ -244,20 +237,19 @@ jobs:
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
@@ -265,14 +257,14 @@ jobs:
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
package: bun-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -291,8 +283,8 @@ jobs:
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
@@ -341,7 +333,7 @@ jobs:
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
@@ -378,9 +370,7 @@ jobs:
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
@@ -392,79 +382,3 @@ jobs:
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
|
||||
126 .github/workflows/bun-mac-x64-baseline.yml (vendored)
@@ -1,9 +1,4 @@
|
||||
name: bun-macOS-x64-baseline
|
||||
|
||||
concurrency:
|
||||
group: bun-macOS-x64-baseline-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
@@ -32,12 +27,11 @@ on:
|
||||
jobs:
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
runs-on: zig-object
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
- cpu: westmere
|
||||
arch: x86_64
|
||||
tag: bun-obj-darwin-x64-baseline
|
||||
# - cpu: haswell
|
||||
@@ -106,18 +100,17 @@ jobs:
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
- cpu: westmere
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: haswell
|
||||
@@ -126,16 +119,16 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: nehalem
|
||||
- cpu: westmere
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: haswell
|
||||
@@ -144,7 +137,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: native
|
||||
@@ -152,7 +145,7 @@ jobs:
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
@@ -173,9 +166,9 @@ jobs:
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache (dependencies)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
@@ -245,20 +238,19 @@ jobs:
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
- cpu: westmere
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
package: bun-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
@@ -266,14 +258,14 @@ jobs:
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -292,8 +284,8 @@ jobs:
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache (link)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
@@ -345,7 +337,7 @@ jobs:
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
@@ -382,9 +374,7 @@ jobs:
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
@@ -396,79 +386,3 @@ jobs:
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
|
||||
124 .github/workflows/bun-mac-x64.yml (vendored)
@@ -1,9 +1,4 @@
|
||||
name: bun-macOS-x64
|
||||
|
||||
concurrency:
|
||||
group: bun-macOS-x64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
@@ -32,12 +27,11 @@ on:
|
||||
jobs:
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
runs-on: zig-object
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64-baseline
|
||||
- cpu: haswell
|
||||
@@ -106,18 +100,17 @@ jobs:
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: haswell
|
||||
@@ -126,16 +119,16 @@ jobs:
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: haswell
|
||||
@@ -144,7 +137,7 @@ jobs:
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: native
|
||||
@@ -152,7 +145,7 @@ jobs:
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
@@ -173,8 +166,8 @@ jobs:
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
|
||||
brew link --overwrite llvm@15
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
@@ -247,20 +240,19 @@ jobs:
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# - cpu: westmere
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
@@ -268,14 +260,14 @@ jobs:
|
||||
package: bun-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-4/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -294,8 +286,8 @@ jobs:
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
|
||||
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
|
||||
brew link --overwrite llvm@15
|
||||
- name: Download WebKit
|
||||
env:
|
||||
@@ -347,7 +339,7 @@ jobs:
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
make webcrypto bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
@@ -384,9 +376,7 @@ jobs:
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
@@ -398,79 +388,3 @@ jobs:
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-x64
|
||||
runner: macos-11
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
|
||||
179 .github/workflows/bun-release-canary.yml (vendored)
@@ -1,179 +0,0 @@
|
||||
name: bun-release-canary
|
||||
concurrency: release-canary
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 14 * * *" # every day at 6am PST
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
sign:
|
||||
name: Sign Release
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Sign Release
|
||||
run: |
|
||||
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
||||
npm:
|
||||
name: Release to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Release
|
||||
run: bun upload-npm -- canary publish
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
# npm-types:
|
||||
# name: Release types to NPM
|
||||
# runs-on: ubuntu-latest
|
||||
# defaults:
|
||||
# run:
|
||||
# working-directory: packages/bun-types
|
||||
# steps:
|
||||
# - id: checkout
|
||||
# name: Checkout
|
||||
# uses: actions/checkout@v3
|
||||
# - id: setup-node
|
||||
# name: Setup Node.js
|
||||
# uses: actions/setup-node@v3
|
||||
# with:
|
||||
# node-version: latest
|
||||
# - id: setup-bun
|
||||
# name: Setup Bun
|
||||
# uses: oven-sh/setup-bun@v1
|
||||
# with:
|
||||
# bun-version: canary
|
||||
# - id: bun-install
|
||||
# name: Install Dependencies
|
||||
# run: bun install
|
||||
# - id: setup-env
|
||||
# name: Setup Environment
|
||||
# run: |
|
||||
# SHA=$(git rev-parse --short "$GITHUB_SHA")
|
||||
# VERSION=$(bun --version)
|
||||
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
|
||||
# echo "Setup tag: ${TAG}"
|
||||
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
# - id: bun-run
|
||||
# name: Build
|
||||
# run: bun run build
|
||||
# env:
|
||||
# BUN_VERSION: ${{ env.TAG }}
|
||||
# - id: npm-publish
|
||||
# name: Release
|
||||
# uses: JS-DevTools/npm-publish@v1
|
||||
# with:
|
||||
# package: packages/bun-types/dist/package.json
|
||||
# token: ${{ secrets.NPM_TOKEN }}
|
||||
# tag: canary
|
||||
docker:
|
||||
name: Release to Dockerhub
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: qemu
|
||||
name: Setup Docker QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- id: buildx
|
||||
name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- id: metadata
|
||||
name: Setup Docker metadata
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: oven/bun
|
||||
tags: canary
|
||||
- id: login
|
||||
name: Login to Docker
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- id: push
|
||||
name: Push to Docker
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./dockerhub
|
||||
file: ./dockerhub/Dockerfile-debian
|
||||
platforms: linux/amd64,linux/arm64
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
build-args: |
|
||||
BUN_VERSION=canary
|
||||
s3:
|
||||
name: Upload to S3
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Release
|
||||
run: bun upload-s3 -- canary
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
54 .github/workflows/bun-release-types-canary.yml (vendored)
@@ -1,54 +0,0 @@
name: bun-release-types-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
257 .github/workflows/bun-release.yml (vendored)
@@ -1,257 +0,0 @@
|
||||
name: bun-release
|
||||
concurrency: release
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
type: string
|
||||
description: The tag to publish
|
||||
required: true
|
||||
jobs:
|
||||
sign:
|
||||
name: Sign Release
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Sign Release
|
||||
run: |
|
||||
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
||||
npm:
|
||||
name: Release to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Release
|
||||
run: bun upload-npm -- "${{ env.TAG }}" publish
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
npm-types:
|
||||
name: Release types to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-types
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-node
|
||||
name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: latest
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Build
|
||||
run: bun run build
|
||||
env:
|
||||
BUN_VERSION: ${{ env.TAG }}
|
||||
- id: npm-publish
|
||||
name: Release
|
||||
uses: JS-DevTools/npm-publish@v1
|
||||
with:
|
||||
package: packages/bun-types/dist/package.json
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
docker:
|
||||
name: Release to Dockerhub
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: environment
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: qemu
|
||||
name: Setup Docker QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- id: buildx
|
||||
name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- id: metadata
|
||||
name: Setup Docker metadata
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: oven/bun
|
||||
tags: |
|
||||
type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
|
||||
type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
|
||||
- id: login
|
||||
name: Login to Docker
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- id: push
|
||||
name: Push to Docker
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./dockerhub
|
||||
file: ./dockerhub/Dockerfile-debian
|
||||
platforms: linux/amd64,linux/arm64
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
build-args: |
|
||||
BUN_VERSION=${{ env.TAG }}
|
||||
homebrew:
|
||||
name: Release to Homebrew
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: oven-sh/homebrew-bun
|
||||
token: ${{ secrets.ROBOBUN_TOKEN }}
|
||||
- id: setup-gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-ruby
|
||||
name: Setup Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: "2.6"
|
||||
- id: update-tap
|
||||
name: Update Tap
|
||||
run: ruby scripts/release.rb "${{ env.TAG }}"
|
||||
- id: commit-tap
|
||||
name: Commit Tap
|
||||
uses: stefanzweifel/git-auto-commit-action@v4
|
||||
with:
|
||||
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
|
||||
commit_message: Release ${{ env.TAG }}
|
||||
commit_user_name: robobun
|
||||
commit_user_email: robobun@oven.sh
|
||||
commit_author: robobun <robobun@oven.sh>
|
||||
s3:
|
||||
name: Upload to S3
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Release
|
||||
run: bun upload-s3 -- "${{ env.TAG }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
137 .github/workflows/bun-types-release.yml (vendored, new file)
@@ -0,0 +1,137 @@
name: Release
on:
workflow_dispatch:

jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3

- name: Install bun
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}

- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest

- name: Install dependencies
run: bun install

- name: ESLint
run: bun run lint

- name: Build package
run: bun run build

- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error

publish-npm:
name: Publish on NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types

steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: 'https://registry.npmjs.org'

- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist

- name: Publish on NPM
run: cd packages/bun-types/dist/ && npm publish --access public
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types

steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: 'https://npm.pkg.github.com/'
scope: '@oven-sh'

- name: Install bun
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}

- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: dist

- name: Add scope to name
run: bun scripts/gpr.ts

- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'

# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist

# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV

# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*
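The jobs above boil down to building `packages/bun-types` and publishing the generated `dist` directory. A hedged local sketch of the NPM path follows; authentication is assumed to be handled by `npm login` or an exported token.

```bash
# Sketch of test-build + publish-npm run by hand from a bun checkout.
cd packages/bun-types
bun install
bun run lint            # the ESLint step
bun run build           # writes the publishable package to dist/
cd dist
npm publish --access public
```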
21 .github/workflows/bun-types-tests.yml (vendored)
@@ -1,16 +1,16 @@
name: bun-types
name: TypeScript Types
on:
push:
paths:
- "packages/bun-types/**"
- packages/bun-types/**/*
branches: [main]
pull_request:
paths:
- "packages/bun-types/**"
- packages/bun-types/**/*

jobs:
tests:
name: type-tests
name: Build and Test
runs-on: ubuntu-latest
defaults:
run:
@@ -18,12 +18,13 @@ jobs:

steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v2

- name: Install bun
uses: oven-sh/setup-bun@v1
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
bun-version: latest
github-token: ${{ secrets.GITHUB_TOKEN }}

- name: Install node
uses: actions/setup-node@v3
@@ -31,11 +32,13 @@ jobs:
node-version: latest

- name: Install dependencies
run: |
bun install
run: bun install

- name: Generate package
run: bun run build

- name: ESLint
run: bun run lint

- name: Tests
run: bun run test
76 .github/workflows/prettier-fmt.yml (vendored)
@@ -1,76 +0,0 @@
name: prettier

on:
pull_request:
branches:
- main
- jarred/test-actions
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive

- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- id: install
name: Install prettier
run: bun install
- name: Run prettier
id: fmt
run: |
rm -f .failed
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed

if [ -s .failed ]; then
delimiter="$(openssl rand -hex 8)"
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
fi
- name: Comment on PR
if: steps.fmt.outputs.prettier_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
message: |
❌ @${{ github.actor }} `prettier` reported errors

```js
${{ steps.fmt.outputs.prettier_fmt_errs }}
```

To one-off fix this manually, run:
```sh
bun fmt
```

You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).

<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.prettier_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `prettier` errors have been resolved. Thank you.

<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Fail the job
if: steps.fmt.outputs.prettier_fmt_errs != ''
run: exit 1
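The deleted prettier gate can still be reproduced locally; it is just the check command from the `Run prettier` step plus the repo's own `bun fmt` fix-up target. A sketch, using the same globs and the old `.prettierrc.cjs` config the job referenced:

```bash
# Sketch of the removed prettier check, run locally.
bun install
bun prettier --check \
  "./bench/**/*.{ts,tsx,js,jsx,mjs}" \
  "./test/**/*.{ts,tsx,js,jsx,mjs}" \
  "./src/**/*.{ts,tsx,js,jsx}" \
  --config .prettierrc.cjs
bun fmt    # one-off fix, as the PR comment suggested
```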
87 .github/workflows/zig-fmt.yml (vendored)
@@ -1,87 +0,0 @@
name: zig-fmt

env:
ZIG_VERSION: 0.11.0-dev.4006+bf827d0b5

on:
pull_request:
branches:
- main
- jarred/test-actions
paths:
- "src/**/*.zig"
- "src/*.zig"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install zig
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
- name: Run zig fmt
id: fmt
run: |
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
delimiter="$(openssl rand -hex 8)"
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"

if [ -s zig-fmt.err ]; then
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
fi

if [ -s zig-fmt.err2 ]; then
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
fi

echo "${delimiter}" >> "${GITHUB_OUTPUT}"
- name: Comment on PR
if: steps.fmt.outputs.zig_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
message: |
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)

```zig
// # zig fmt --check src/*.zig src/**/*.zig
${{ steps.fmt.outputs.zig_fmt_errs }}
```

To one-off fix this manually, run:

```sh
zig fmt src/*.zig src/**/*.zig
```

<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>

- name: Uncomment on PR
if: steps.fmt.outputs.zig_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `zig fmt` errors have been resolved. Thank you.

<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>

- name: Fail the job
if: steps.fmt.outputs.zig_fmt_errs != ''
run: exit 1
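Likewise, the removed `zig-fmt` job amounts to fetching the pinned Zig build and running `zig fmt --check`. A hedged local sketch; the pinned version comes from the workflow above and may be stale:

```bash
# Sketch of the removed zig-fmt check, run locally on Linux x86_64.
ZIG_VERSION=0.11.0-dev.4006+bf827d0b5
curl -L -o zig.tar.xz "https://ziglang.org/builds/zig-linux-x86_64-${ZIG_VERSION}.tar.xz"
tar -xf zig.tar.xz
export PATH="$PWD/zig-linux-x86_64-${ZIG_VERSION}:$PATH"

zig fmt --check src/*.zig src/**/*.zig   # list unformatted files
zig fmt src/*.zig src/**/*.zig           # one-off fix
```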
26 .gitignore (vendored)
@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm

*.o
*.a
profile.json
@@ -13,7 +14,7 @@ dist
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
/package-lock.json
build
*.wat
zig-out
@@ -45,7 +46,6 @@ outcss
txt.js
.idea
.vscode/cpp*
.vscode/clang*

node_modules_*
*.jsb
@@ -96,8 +96,6 @@ packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
*.bc

src/fallback.version
@@ -112,21 +110,3 @@ misctools/machbench
bun-webkit

src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj

failing-tests.txt
test.txt
myscript.sh

cold-jsc-start
cold-jsc-start.d

/test.ts

src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h

make-dev-stats.csv
.gitmodules (vendored, 10 changes)

@@ -65,13 +65,3 @@ fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
	path = src/deps/c-ares
	url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
	path = src/deps/zstd
	url = https://github.com/facebook/zstd.git
	ignore = dirty
[submodule "src/deps/base64"]
	path = src/deps/base64
	url = https://github.com/aklomp/base64.git
	ignore = dirty
	depth = 1
	shallow = true

@@ -1,13 +1,8 @@
src/fallback.html
src/bun.js/WebKit
src/js/out
src/*.out.js
src/*out.*.js
src/deps
src/test/fixtures
src/react-refresh.js
# src/test
test/bun.js/solid-dom-fixtures
test/bun.js/bundled
#src/bun.js/builtins
# src/api/demo
test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js
.prettierrc (new file, 7 changes)

@@ -0,0 +1,7 @@
{
  "tabWidth": 2,
  "useTabs": false,
  "singleQuote": false,
  "bracketSpacing": true,
  "trailingComma": "all"
}

@@ -1,15 +0,0 @@
module.exports = {
  arrowParens: "avoid",
  printWidth: 120,
  trailingComma: "all",
  useTabs: false,
  quoteProps: "preserve",
  overrides: [
    {
      files: ["*.md"],
      options: {
        printWidth: 80,
      },
    },
  ],
};
@@ -1,21 +0,0 @@
// I would have made this a bash script but there isn't an easy way to track
// time in bash sub-second cross platform.
import fs from "fs";
const start = Date.now() + 5;
const result = Bun.spawnSync(process.argv.slice(2), {
  stdio: ["inherit", "inherit", "inherit"],
});
const end = Date.now();
const diff = (Math.max(Math.round(end - start), 0) / 1000).toFixed(3);
const success = result.exitCode === 0;
try {
  const line = `${new Date().toISOString()}, ${success ? "success" : "fail"}, ${diff}\n`;
  if (fs.existsSync(".scripts/make-dev-stats.csv")) {
    fs.appendFileSync(".scripts/make-dev-stats.csv", line);
  } else {
    fs.writeFileSync(".scripts/make-dev-stats.csv", line);
  }
} catch {
  // Ignore
}
process.exit(result.exitCode);

@@ -7,7 +7,3 @@ if [ -d ./node_modules/bun-webkit ]; then
  # get the first matching bun-webkit-* directory name
  ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi

# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null
.vscode/c_cpp_properties.json (vendored, 24 changes)
@@ -12,15 +12,14 @@
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/WebCore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/modules/",
|
||||
"${workspaceFolder}/src/js/builtins/",
|
||||
"${workspaceFolder}/src/js/out",
|
||||
"${workspaceFolder}/src/bun.js/builtins/",
|
||||
"${workspaceFolder}/src/bun.js/builtins/cpp",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/src/deps/uws/uSockets/src"
|
||||
],
|
||||
"browse": {
|
||||
@@ -33,18 +32,15 @@
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/src/bun.js/bindings/**",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/js/builtins/*",
|
||||
"${workspaceFolder}/src/js/out/*",
|
||||
"${workspaceFolder}/src/bun.js/modules/*",
|
||||
"${workspaceFolder}/src/bun.js/builtins/**",
|
||||
"${workspaceFolder}/src/bun.js/builtins/cpp/**",
|
||||
"${workspaceFolder}/src/bun.js/modules/**",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps/uws/uSockets/src",
|
||||
"${workspaceFolder}/src/napi"
|
||||
"${workspaceFolder}/src/deps/uws/uSockets/src"
|
||||
],
|
||||
"limitSymbolsToIncludedHeaders": true,
|
||||
"databaseFilename": ".vscode/cppdb"
|
||||
@@ -61,7 +57,7 @@
|
||||
"DU_DISABLE_RENAMING=1"
|
||||
],
|
||||
"macFrameworkPath": [],
|
||||
"compilerPath": "${workspaceFolder}/.vscode/clang++",
|
||||
"compilerPath": "/opt/homebrew/opt/llvm/bin/clang++",
|
||||
"cStandard": "c17",
|
||||
"cppStandard": "c++20"
|
||||
}
|
||||
|
||||
.vscode/extensions.json (vendored, 6 changes)

@@ -1,10 +1,8 @@
{
  "recommendations": [
    "ziglang.vscode-zig",
    "AugusteRame.zls-vscode",
    "esbenp.prettier-vscode",
    "xaver.clang-format",
    "vadimcn.vscode-lldb",
    "bierner.comment-tagged-templates",
    "ms-vscode.cpptools"
    "vadimcn.vscode-lldb"
  ]
}
.vscode/launch.json (generated, vendored, 297 changes)
@@ -1,202 +1,105 @@
|
||||
{
|
||||
// The usage of BUN_GARBAGE_COLLECTOR_LEVEL=2 is important for debugging
|
||||
// It will force the garbage collector to run after every test and every call to expect()
|
||||
// it makes our tests very slow
|
||||
// But it helps catch memory bugs
|
||||
|
||||
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits:
|
||||
// { "initCommands": ["process handle -p false -s false -n false SIGHUP"] }
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file]",
|
||||
"name": "bun test",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"args": ["wiptest", "${file}"],
|
||||
"cwd": "${workspaceFolder}/test/bun.js",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] (fast)",
|
||||
"name": "bun test (all)",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"args": ["wiptest"],
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run current file",
|
||||
"program": "bun-debug",
|
||||
"args": ["${file}"],
|
||||
"cwd": "${file}/../../",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run (hot)",
|
||||
"program": "bun-debug",
|
||||
"args": ["--hot", "${file}"],
|
||||
"cwd": "${file}/../../",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run",
|
||||
"program": "bun-debug",
|
||||
"args": ["check.tsx", "-c"],
|
||||
"cwd": "${env:HOME}/Build/react-ssr",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] (verbose)",
|
||||
"name": "bun http example",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"args": ["run", "examples/http.ts"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --watch",
|
||||
"name": "bun http file example",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "--watch", "${file}"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [file] --only",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "--only", "${file}"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [*]",
|
||||
"program": "bun-debug",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [*] (fast)",
|
||||
"program": "bun-debug",
|
||||
"args": ["test"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test [*] --only",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "--only"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run [file]",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "${file}", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"NODE_ENV": "development"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run [file] (gc)",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run [file] (verbose)",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"args": ["run", "examples/bun/http-file.ts"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run [file] --watch",
|
||||
"name": "bun html-rewriter example",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "--watch", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"args": ["run", "examples/bun/html-rewriter.ts"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run [file] --hot",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "--hot", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -211,99 +114,11 @@
|
||||
"request": "launch",
|
||||
"name": "bun build debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"args": ["build", "--platform=bun", "--outdir=/tmp/testout", "${file}"],
|
||||
"cwd": "${file}/../../",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
|
||||
},
|
||||
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits.
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"]
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun build debug out.js",
|
||||
"program": "bun-debug",
|
||||
"args": ["--outfile=out.js", "bun", "${file}"],
|
||||
"cwd": "${file}/../",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun build debug STDOUT",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "${file}"],
|
||||
"cwd": "${file}/../",
|
||||
"console": "internalConsole",
|
||||
"env": {}
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun build debug (no splitting, browser entry)",
|
||||
"program": "bun-debug",
|
||||
"args": [
|
||||
"--entry-names=./[name].[ext]",
|
||||
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-no-split",
|
||||
"--platform=browser",
|
||||
"bun",
|
||||
"./quick.tsx"
|
||||
],
|
||||
"cwd": "/Users/jarred/Code/bun-rsc",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"NODE_ENV": "production"
|
||||
// "BUN_DEBUG_QUIET_LOGS": "1"
|
||||
// "BUN_DUMP_SYMBOLS": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun build debug (splitting, rsc)",
|
||||
"program": "bun-debug",
|
||||
"args": [
|
||||
"--entry-names=./[name].[ext]",
|
||||
"--outdir=/Users/jarred/Code/bun-rsc/.rsc-split",
|
||||
"--server-components",
|
||||
"--platform=bun",
|
||||
"--splitting",
|
||||
"bun",
|
||||
"/Users/jarred/Code/bun-rsc/components/Message.tsx",
|
||||
"/Users/jarred/Code/bun-rsc/components/Button.tsx"
|
||||
],
|
||||
"cwd": "/Users/jarred/Code/bun-rsc",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"NODE_ENV": "production"
|
||||
// "BUN_DEBUG_QUIET_LOGS": "1"
|
||||
// "BUN_DUMP_SYMBOLS": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun build debug (NO splitting, rsc)",
|
||||
"program": "bun-debug",
|
||||
"args": [
|
||||
"--entry-names=./[name].[ext]",
|
||||
"--outdir=/Users/jarred/Code/bun-rsc/.rsccheck",
|
||||
"--server-components",
|
||||
"--platform=bun",
|
||||
"bun",
|
||||
"/Users/jarred/Code/bun-rsc/pages/index.js"
|
||||
],
|
||||
"cwd": "/Users/jarred/Code/bun-rsc",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"NODE_ENV": "production"
|
||||
// "BUN_DEBUG_QUIET_LOGS": "1"
|
||||
// "BUN_DUMP_SYMBOLS": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -324,7 +139,7 @@
|
||||
"name": "bun install",
|
||||
"program": "bun-debug",
|
||||
"args": ["install"],
|
||||
"cwd": "${fileDirname}",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
|
||||
.vscode/settings.json (vendored, 50 changes)
@@ -7,15 +7,13 @@
|
||||
"search.followSymlinks": false,
|
||||
"search.useIgnoreFiles": true,
|
||||
"zig.buildOnSave": false,
|
||||
// We do this until we upgrade to latest Zig so that zls doesn't break our code.
|
||||
"zig.formattingProvider": "extension",
|
||||
"zig.buildArgs": ["obj", "-Dfor-editor"],
|
||||
"zig.buildOption": "build",
|
||||
"zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"[zig]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.useTabStops": false,
|
||||
"editor.defaultFormatter": "ziglang.vscode-zig",
|
||||
"editor.defaultFormatter": "tiehuis.zig",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[ts]": {
|
||||
@@ -26,8 +24,6 @@
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"zig.zls.enableInlayHints": false,
|
||||
|
||||
"[jsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
@@ -36,22 +32,8 @@
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[yaml]": {
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[markdown]": {
|
||||
"editor.unicodeHighlight.ambiguousCharacters": false,
|
||||
"editor.unicodeHighlight.invisibleCharacters": false,
|
||||
"diffEditor.ignoreTrimWhitespace": false,
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.wordWrap": "on",
|
||||
"editor.quickSuggestions": {
|
||||
"comments": "off",
|
||||
"strings": "off",
|
||||
"other": "off"
|
||||
}
|
||||
},
|
||||
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
|
||||
"zig.testCmd": "make test ${file} ${filter} ${bin}",
|
||||
"lldb.verboseLogging": false,
|
||||
"files.exclude": {
|
||||
"**/.git": true,
|
||||
@@ -77,9 +59,7 @@
|
||||
"src/deps/lol-html": true,
|
||||
"src/deps/c-ares": true,
|
||||
"src/deps/tinycc": true,
|
||||
"src/deps/zstd": true,
|
||||
"test/snippets/package-json-exports/_node_modules_copy": true,
|
||||
"src/js/out": true
|
||||
"test/snippets/package-json-exports/_node_modules_copy": true
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
"**/.vscode": true,
|
||||
@@ -202,25 +182,9 @@
|
||||
"ethernet.h": "c",
|
||||
"inet.h": "c",
|
||||
"packet.h": "c",
|
||||
"queue": "cpp",
|
||||
"compare": "cpp",
|
||||
"concepts": "cpp",
|
||||
"typeindex": "cpp",
|
||||
"__verbose_abort": "cpp",
|
||||
"__std_stream": "cpp",
|
||||
"any": "cpp",
|
||||
"charconv": "cpp",
|
||||
"csignal": "cpp",
|
||||
"format": "cpp",
|
||||
"forward_list": "cpp",
|
||||
"future": "cpp",
|
||||
"regex": "cpp",
|
||||
"span": "cpp",
|
||||
"valarray": "cpp",
|
||||
"codecvt": "cpp"
|
||||
"queue": "cpp"
|
||||
},
|
||||
"cmake.configureOnOpen": false,
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"eslint.workingDirectories": ["packages/bun-types"],
|
||||
"typescript.tsdk": "node_modules/typescript/lib"
|
||||
"C_Cpp.errorSquiggles": "Enabled",
|
||||
"eslint.workingDirectories": ["packages/bun-types"]
|
||||
}
|
||||
|
||||
@@ -1,76 +0,0 @@
# Contributing to Bun

> **Important:** All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.

## Bun's codebase

Bun is written mostly in Zig, but WebKit & JavaScriptCore (the JavaScript engine) is written in C++.

Today (February 2023), Bun's codebase has five distinct parts:

- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
- JavaScript runtime bindings ([`src/bun.zig/bindings/**/*.cpp`](src/bun.zig/bindings/))
- Package manager ([`src/install/`](src/install/))
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))

The JavaScript transpiler & module resolver is mostly independent from the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in [`src/js_parser.zig`](src/js_parser.zig). The JavaScript AST data structures are mostly in [`src/js_ast.zig`](src/js_ast.zig). The JavaScript lexer is in [`src/js_lexer.zig`](src/js_lexer.zig). A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.

## Getting started

Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment setup!

## Memory management in Bun

For the Zig code, please:

1. Do your best to avoid dynamically allocating memory.
2. If we need to allocate memory, carefully consider the owner of that memory. If it's a JavaScript object, it will need a finalizer. If it's in Zig, it will need to be freed either via an arena or manually.
3. Prefer arenas over manual memory management. Manually freeing memory is leak & crash prone.
4. If the memory needs to be accessed across threads, use `bun.default_allocator`. Mimalloc threadlocal heaps are not safe to free across threads.

The JavaScript transpiler has special-handling for memory management. The parser allocates into a single arena and the memory is recycled after each parse.

## JavaScript runtime

Most of Bun's JavaScript runtime code lives in [`src/bun.js`](src/bun.js).

### Calling C++ from Zig & Zig from C++

TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and [`bindings.cpp`](src/bun.js/bindings/bindings.cpp) for now)

### Adding a new JavaScript class

1. Add a new file in [`src/bun.js/*.classes.ts`](src/bun.js) to define the instance and static methods for the class.
2. Add a new file in [`src/bun.js/**/*.zig`](src/bun.js) and expose the struct in [`src/bun.js/generated_classes_list.zig`](src/bun.js/generated_classes_list.zig)
3. Run `make codegen`

Copy from examples like `Subprocess` or `Response`.

### ESM Modules and Builtins JS

Bun implements ESM modules in a mix of native code and JavaScript.

Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.

Builtin modules in Bun are located in [`src/js`](src/js/). These files are transpiled and support a JavaScriptCore-only syntax for internal slots, which is explained further in [`src/js/README.md`](src/js/README.md).

Native C++ modules are in `src/bun.js/modules/`.

The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).

### Memory management in Bun's JavaScript runtime

TODO: fill this out (for now, use `JSC.Strong` in most cases)

### Strings

TODO: fill this out (for now, use `JSValue.toSlice()` in most cases)

#### JavaScriptCore C API

Do not copy from examples leveraging the JavaScriptCore C API. Please do not use this in new code. We will not accept PRs that add new code that uses the JavaScriptCore C API.

## Testing

See [`test/README.md`](test/README.md) for information on how to run tests.
Dockerfile (79 changes)
@@ -10,9 +10,9 @@ ARG ARCH=x86_64
|
||||
ARG BUILD_MACHINE_ARCH=x86_64
|
||||
ARG TRIPLET=${ARCH}-linux-gnu
|
||||
ARG BUILDARCH=amd64
|
||||
ARG WEBKIT_TAG=2023-aug3-4
|
||||
ARG WEBKIT_TAG=jul27-2
|
||||
ARG ZIG_TAG=jul1
|
||||
ARG ZIG_VERSION="0.11.0-dev.4006+bf827d0b5"
|
||||
ARG ZIG_VERSION="0.11.0-dev.947+cf822c6dd"
|
||||
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
|
||||
|
||||
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
|
||||
@@ -20,7 +20,7 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
|
||||
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
|
||||
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
|
||||
ARG GIT_SHA=""
|
||||
ARG BUN_BASE_VERSION=0.7
|
||||
ARG BUN_BASE_VERSION=0.4
|
||||
|
||||
FROM bitnami/minideb:bullseye as bun-base
|
||||
|
||||
@@ -122,8 +122,8 @@ ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ENV CCACHE_DIR=/ccache
|
||||
ENV JSC_BASE_DIR=${WEBKIT_DIR}
|
||||
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
|
||||
@@ -149,9 +149,6 @@ ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
|
||||
|
||||
@@ -295,27 +292,6 @@ WORKDIR $BUN_DIR
|
||||
RUN cd $BUN_DIR && \
|
||||
make uws && rm -rf src/deps/uws Makefile
|
||||
|
||||
FROM bun-base as base64
|
||||
|
||||
ARG DEBIAN_FRONTEND
|
||||
ARG GITHUB_WORKSPACE
|
||||
ARG ZIG_PATH
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR
|
||||
ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make base64 && rm -rf src/deps/base64 Makefile
|
||||
|
||||
FROM bun-base as picohttp
|
||||
|
||||
ARG DEBIAN_FRONTEND
|
||||
@@ -327,6 +303,8 @@ ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
|
||||
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
@@ -423,7 +401,6 @@ ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
@@ -448,7 +425,7 @@ ENV CCACHE_DIR=/ccache
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
|
||||
mkdir -p $BUN_RELEASE_DIR && \
|
||||
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Doptimize=ReleaseFast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
|
||||
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Drelease-fast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
|
||||
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
|
||||
|
||||
FROM scratch as build_release_obj
|
||||
@@ -482,20 +459,19 @@ ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ENV JSC_BASE_DIR=${WEBKIT_DIR}
|
||||
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
|
||||
|
||||
# Required for webcrypto bindings
|
||||
# Required for `make webcrypto`
|
||||
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
|
||||
|
||||
ENV CCACHE_DIR=/ccache
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
|
||||
make release-bindings -j10 && mv src/bun.js/bindings-obj/* /tmp
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && make webcrypto && \
|
||||
make release-bindings -j10 && mv ${BUN_DEPS_OUT_DIR}/libwebcrypto.a /tmp && mv src/bun.js/bindings-obj/* /tmp
|
||||
|
||||
FROM bun-base as sqlite
|
||||
|
||||
@@ -508,14 +484,10 @@ ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
ENV CCACHE_DIR=/ccache
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
|
||||
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
@@ -524,36 +496,10 @@ ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
|
||||
|
||||
FROM bun-base as zstd
|
||||
|
||||
ARG DEBIAN_FRONTEND
|
||||
ARG GITHUB_WORKSPACE
|
||||
ARG ZIG_PATH
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR
|
||||
ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
ENV CCACHE_DIR=/ccache
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
|
||||
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ENV JSC_BASE_DIR=${WEBKIT_DIR}
|
||||
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
|
||||
|
||||
FROM scratch as build_release_cpp
|
||||
|
||||
COPY --from=compile_cpp /tmp/*.o /
|
||||
COPY --from=compile_cpp /tmp/libwebcrypto.a /
|
||||
|
||||
FROM prepare_release as build_release
|
||||
|
||||
@@ -569,7 +515,6 @@ ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
@@ -577,14 +522,12 @@ ENV JSC_BASE_DIR=${WEBKIT_DIR}
|
||||
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
|
||||
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
|
||||
Dockerfile.devcontainer (new file, 126 changes)
@@ -0,0 +1,126 @@
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG BUILDARCH=amd64
|
||||
ARG ARCH=x86_64
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
|
||||
ARG ZIG_VERSION="0.11.0-dev.947+cf822c6dd"
|
||||
|
||||
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
|
||||
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
|
||||
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
|
||||
|
||||
FROM --platform=linux/${BUILDARCH} ubuntu:22.04 as bun.devcontainer
|
||||
|
||||
ARG DEBIAN_FRONTEND
|
||||
ARG GITHUB_WORKSPACE
|
||||
ARG BUILDARCH
|
||||
ARG ZIG_PATH
|
||||
ARG WEBKIT_DIR
|
||||
ARG BUN_RELEASE_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
ARG BUN_PACKAGES_DIR
|
||||
|
||||
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
|
||||
ENV PATH "$ZIG_PATH:$PATH"
|
||||
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
|
||||
ENV LIB_ICU_PATH ${WEBKIT_OUT_DIR}/lib
|
||||
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
|
||||
ENV PATH "${BUN_PACKAGES_DIR}/bun-linux-x64:${BUN_PACKAGES_DIR}/bun-linux-aarch64:${BUN_PACKAGES_DIR}/debug-bun-linux-x64:${BUN_PACKAGES_DIR}/debug-bun-linux-aarch64:$PATH"
|
||||
ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
|
||||
ENV BUN_INSTALL /home/ubuntu/.bun
|
||||
ENV XDG_CONFIG_HOME /home/ubuntu/.config
|
||||
|
||||
WORKDIR ${GITHUB_WORKSPACE}
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common && \
|
||||
add-apt-repository ppa:longsleep/golang-backports && \
|
||||
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
|
||||
chmod +x llvm.sh && \
|
||||
./llvm.sh 15 && \
|
||||
curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg2 \
|
||||
software-properties-common \
|
||||
cmake \
|
||||
build-essential \
|
||||
git \
|
||||
libssl-dev \
|
||||
ruby \
|
||||
liblld-15-dev \
|
||||
libclang-15-dev \
|
||||
nodejs \
|
||||
gcc \
|
||||
g++ \
|
||||
clang-15 \
|
||||
clang-format-15 \
|
||||
libc++-15-dev \
|
||||
libc++abi-15-dev \
|
||||
lld-15 \
|
||||
libicu-dev \
|
||||
wget \
|
||||
rustc \
|
||||
cargo \
|
||||
unzip \
|
||||
tar \
|
||||
golang-go ninja-build pkg-config automake autoconf libtool curl && \
|
||||
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-15 90 && \
|
||||
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-15 90 && \
|
||||
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-15 90 && \
|
||||
npm install -g esbuild
|
||||
|
||||
ENV CC=clang-15
|
||||
ENV CXX=clang++-15
|
||||
ENV ZIG "${ZIG_PATH}/zig"
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
|
||||
ARG ZIG_FOLDERNAME
|
||||
ARG ZIG_FILENAME
|
||||
ARG ZIG_URL
|
||||
|
||||
ADD $ZIG_URL .
|
||||
RUN tar -xf ${ZIG_FILENAME} && \
|
||||
rm ${ZIG_FILENAME} && \
|
||||
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
|
||||
|
||||
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-15 90
|
||||
|
||||
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
|
||||
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
COPY ".devcontainer/scripts/" /scripts/
|
||||
COPY ".devcontainer/scripts/getting-started.sh" $GITHUB_WORKSPACE/getting-started.sh
|
||||
COPY ".devcontainer/README.md" $GITHUB_WORKSPACE/README.md
|
||||
|
||||
ENV JSC_BASE_DIR=$WEBKIT_DIR
|
||||
ENV WEBKIT_RELEASE_DIR=$WEBKIT_DIR
|
||||
ENV WEBKIT_DEBUG_DIR=$WEBKIT_DIR
|
||||
ENV WEBKIT_RELEASE_DIR_LTO=$WEBKIT_DIR
|
||||
|
||||
RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
|
||||
bash /scripts/common-debian.sh && \
|
||||
bash /scripts/github.sh && \
|
||||
bash /scripts/nice.sh && \
|
||||
bash /scripts/zig-env.sh
|
||||
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json
|
||||
@@ -1,30 +0,0 @@
|
||||
// https://github.com/nodejs/node/issues/34493
|
||||
import { AsyncLocalStorage } from "async_hooks";
|
||||
const asyncLocalStorage = new AsyncLocalStorage();
|
||||
|
||||
// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
|
||||
let fn = () => /test/.test("test");
|
||||
|
||||
let runWithExpiry = async (expiry, fn) => {
|
||||
let iterations = 0;
|
||||
while (Date.now() < expiry) {
|
||||
await fn();
|
||||
iterations++;
|
||||
}
|
||||
return iterations;
|
||||
};
|
||||
|
||||
console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);
|
||||
|
||||
let withAls;
|
||||
await asyncLocalStorage.run(123, async () => {
|
||||
withAls = await runWithExpiry(Date.now() + 45000, fn);
|
||||
console.log(`Performed ${withAls} iterations (with ALS enabled)`);
|
||||
});
|
||||
|
||||
asyncLocalStorage.disable();
|
||||
|
||||
let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
|
||||
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);
|
||||
|
||||
console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");
|
||||
bench/bun.lockb (binary file not shown)
bench/bundle/.gitignore (vendored, 171 changes)
@@ -1,171 +0,0 @@
|
||||
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
|
||||
|
||||
# Logs
|
||||
|
||||
logs
|
||||
_.log
|
||||
npm-debug.log_
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
|
||||
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
|
||||
|
||||
# Runtime data
|
||||
|
||||
pids
|
||||
_.pid
|
||||
_.seed
|
||||
\*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
|
||||
coverage
|
||||
\*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
|
||||
\*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
|
||||
\*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
|
||||
.cache/
|
||||
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.\*
|
||||
|
||||
esbuild
|
||||
@@ -1,40 +0,0 @@
# Bundler benchmark

This is a performance benchmark of the following bundlers:

- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack

It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), aside from the fact that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/), with minification and source maps enabled.

To run the benchmark:

```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```

Various output will be written to the console by each bundler. Scan through the results for lines that look like this underneath each bundler output:

```sh
real <number>
user <number>
sys <number>
```

These lines are generated by the `time` command which is used to benchmark each build.

## Results

The `real` results, as run on a 16-inch M1 Macbook Pro:

| Bundler | Time   |
| ------- | ------ |
| Bun     | 0.17s  |
| esbuild | 0.33s  |
| Rollup  | 18.82s |
| Webpack | 26.21s |
| Parcel  | 17.95s |
Binary file not shown.
@@ -1,8 +0,0 @@
{
  "name": "bundle",
  "module": "index.ts",
  "type": "module",
  "devDependencies": {
    "bun-types": "^0.7.0"
  }
}

@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three

@@ -1,20 +0,0 @@
{
  "compilerOptions": {
    "lib": [
      "ESNext"
    ],
    "module": "esnext",
    "target": "esnext",
    "moduleResolution": "bundler",
    "strict": true,
    "downlevelIteration": true,
    "skipLibCheck": true,
    "jsx": "react-jsx",
    "allowSyntheticDefaultImports": true,
    "forceConsistentCasingInFileNames": true,
    "allowJs": true,
    "types": [
      "bun-types" // add Bun global
    ]
  }
}
@@ -6,4 +6,7 @@ const arg = process.argv.slice(1);
|
||||
|
||||
// TODO: remove Buffer.from() when readFileSync() returns Buffer
|
||||
|
||||
for (let i = 0; i < count; i++) console.log(arg.map(file => Buffer.from(readFileSync(file, "utf8"))).join(""));
|
||||
for (let i = 0; i < count; i++)
|
||||
console.log(
|
||||
arg.map((file) => Buffer.from(readFileSync(file, "utf8"))).join("")
|
||||
);
|
||||
|
||||
@@ -7,8 +7,14 @@ function runner(ready) {
|
||||
for (let i = 0; i < size; i++) {
|
||||
rand[i] = (Math.random() * 1024 * 1024) | 0;
|
||||
}
|
||||
const dest = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
|
||||
const src = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
|
||||
const dest = `/tmp/fs-test-copy-file-${(
|
||||
(Math.random() * 10000000 + 100) |
|
||||
0
|
||||
).toString(32)}`;
|
||||
const src = `/tmp/fs-test-copy-file-${(
|
||||
(Math.random() * 10000000 + 100) |
|
||||
0
|
||||
).toString(32)}`;
|
||||
writeFileSync(src, Buffer.from(rand.buffer), { encoding: "buffer" });
|
||||
const { size: fileSize } = statSync(src);
|
||||
if (fileSize !== rand.byteLength) {
|
||||
@@ -29,6 +35,6 @@ runner((src, dest, rand) =>
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
}),
|
||||
})
|
||||
);
|
||||
await run();
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
import EventEmitter3 from "eventemitter3";
|
||||
import { group } from "mitata";
|
||||
import EventEmitterNative from "node:events";
|
||||
|
||||
export const implementations = [
|
||||
{
|
||||
EventEmitter: EventEmitterNative,
|
||||
name: process.isBun ? (EventEmitterNative.init ? "bun" : "C++") : "node:events",
|
||||
monkey: true,
|
||||
},
|
||||
// { EventEmitter: EventEmitter3, name: "EventEmitter3" },
|
||||
].filter(Boolean);
|
||||
|
||||
for (const impl of implementations) {
|
||||
impl.EventEmitter?.setMaxListeners?.(Infinity);
|
||||
}
|
||||
|
||||
export function groupForEmitter(name, cb) {
|
||||
if (implementations.length === 1) {
|
||||
return cb({
|
||||
...implementations[0],
|
||||
name: `${name}: ${implementations[0].name}`,
|
||||
});
|
||||
} else {
|
||||
return group(name, () => {
|
||||
for (let impl of implementations) {
|
||||
cb(impl);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,96 +0,0 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
var id = 0;
|
||||
|
||||
groupForEmitter("single emit", ({ EventEmitter, name }) => {
|
||||
const emitter = new EventEmitter();
|
||||
|
||||
emitter.on("hello", event => {
|
||||
event.preventDefault();
|
||||
});
|
||||
|
||||
bench(name, () => {
|
||||
emitter.emit("hello", {
|
||||
preventDefault() {
|
||||
id++;
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
groupForEmitter("on x 10_000 (handler)", ({ EventEmitter, name }) => {
|
||||
const emitter = new EventEmitter();
|
||||
|
||||
bench(name, () => {
|
||||
var cb = event => {
|
||||
event.preventDefault();
|
||||
};
|
||||
emitter.on("hey", cb);
|
||||
var called = false;
|
||||
for (let i = 0; i < 10_000; i++)
|
||||
emitter.emit("hey", {
|
||||
preventDefault() {
|
||||
id++;
|
||||
called = true;
|
||||
},
|
||||
});
|
||||
|
||||
if (!called) throw new Error("not called");
|
||||
});
|
||||
});
|
||||
|
||||
// for (let { impl: EventEmitter, name, monkey } of []) {
|
||||
// if (monkey) {
|
||||
// var monkeyEmitter = Object.assign({}, EventEmitter.prototype);
|
||||
// monkeyEmitter.on("hello", event => {
|
||||
// event.preventDefault();
|
||||
// });
|
||||
|
||||
// bench(`[monkey] ${className}.emit`, () => {
|
||||
// var called = false;
|
||||
// monkeyEmitter.emit("hello", {
|
||||
// preventDefault() {
|
||||
// id++;
|
||||
// called = true;
|
||||
// },
|
||||
// });
|
||||
|
||||
// if (!called) {
|
||||
// throw new Error("monkey failed");
|
||||
// }
|
||||
// });
|
||||
|
||||
// bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
|
||||
// var cb = () => {
|
||||
// event.preventDefault();
|
||||
// };
|
||||
// monkeyEmitter.on("hey", cb);
|
||||
// for (let i = 0; i < 10_000; i++)
|
||||
// monkey.emit("hey", {
|
||||
// preventDefault() {
|
||||
// id++;
|
||||
// },
|
||||
// });
|
||||
// monkeyEmitter.off("hey", cb);
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
|
||||
// var target = new EventTarget();
|
||||
// target.addEventListener("hello", event => {});
|
||||
// bench("EventTarget.dispatch", () => {
|
||||
// target.dispatchEvent(event);
|
||||
// });
|
||||
|
||||
// var hey = new Event("hey");
|
||||
|
||||
// bench("EventTarget.on x 10_000 (handler)", () => {
|
||||
// var handler = event => {};
|
||||
// target.addEventListener("hey", handler);
|
||||
|
||||
// for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
|
||||
// target.removeEventListener("hey", handler);
|
||||
// });
|
||||
|
||||
await run();
|
||||
@@ -1,40 +0,0 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
var id = 0;
|
||||
|
||||
groupForEmitter("test 1", ({ EventEmitter, name }) => {
|
||||
const emitter = new EventEmitter();
|
||||
|
||||
emitter.on("hello", event => {
|
||||
event.preventDefault();
|
||||
});
|
||||
|
||||
bench(name, () => {
|
||||
emitter.once("hello", event => {
|
||||
event.preventDefault();
|
||||
});
|
||||
emitter.emit("hello", {
|
||||
preventDefault() {
|
||||
id++;
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
groupForEmitter("test 2", ({ EventEmitter, name }) => {
|
||||
const emitter = new EventEmitter();
|
||||
|
||||
bench(name, () => {
|
||||
emitter.once("hello", event => {
|
||||
event.preventDefault();
|
||||
});
|
||||
emitter.emit("hello", {
|
||||
preventDefault() {
|
||||
id++;
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,63 +0,0 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
// Psuedo RNG is derived from https://stackoverflow.com/a/424445
|
||||
let rngState = 123456789;
|
||||
function nextInt() {
|
||||
const m = 0x80000000; // 2**31;
|
||||
const a = 1103515245;
|
||||
const c = 12345;
|
||||
rngState = (a * rngState + c) % m;
|
||||
return rngState;
|
||||
}
|
||||
function nextRange(start, end) {
|
||||
// returns in range [start, end): including start, excluding end
|
||||
// can't modulu nextInt because of weak randomness in lower bits
|
||||
const rangeSize = end - start;
|
||||
const randomUnder1 = nextInt() / 0x7fffffff; // 2**31 - 1
|
||||
return start + Math.floor(randomUnder1 * rangeSize);
|
||||
}
|
||||
|
||||
const chunks = new Array(1024).fill(null).map((_, j) => {
|
||||
const arr = new Uint8Array(1024);
|
||||
for (let i = 0; i < arr.length; i++) {
|
||||
arr[i] = nextRange(0, 256);
|
||||
}
|
||||
return arr;
|
||||
});
|
||||
|
||||
groupForEmitter("stream simulation", ({ EventEmitter, name }) => {
|
||||
bench(name, () => {
|
||||
let id = 0;
|
||||
const stream = new EventEmitter();
|
||||
|
||||
stream.on("start", res => {
|
||||
if (res.status !== 200) throw new Error("not 200");
|
||||
});
|
||||
|
||||
const recived = [];
|
||||
stream.on("data", req => {
|
||||
recived.push(req);
|
||||
});
|
||||
|
||||
stream.on("end", ev => {
|
||||
ev.preventDefault();
|
||||
});
|
||||
|
||||
// simulate a stream
|
||||
stream.emit("start", { status: 200 });
|
||||
for (let chunk of chunks) {
|
||||
stream.emit("data", chunk);
|
||||
}
|
||||
stream.emit("end", {
|
||||
preventDefault() {
|
||||
id++;
|
||||
},
|
||||
});
|
||||
|
||||
if (id !== 1) throw new Error("not implemented right");
|
||||
if (recived.length !== 1024) throw new Error("not implemented right");
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
Binary file not shown.
@@ -1 +1 @@
|
||||
console.log("Hello via Bun!");
|
||||
console.log("Hello via Bun!");
|
||||
@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
|
||||
const requests = new Array(count);
|
||||
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
|
||||
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
|
||||
r.text(),
|
||||
);
|
||||
}
|
||||
|
||||
await Promise.all(requests);
|
||||
|
||||
@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
|
||||
const requests = new Array(count);
|
||||
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
|
||||
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
|
||||
r.text(),
|
||||
);
|
||||
}
|
||||
|
||||
await Promise.all(requests);
|
||||
|
||||
@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
|
||||
const requests = new Array(count);
|
||||
|
||||
for (let i = 0; i < requests.length; i++) {
|
||||
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
|
||||
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
|
||||
r.text()
|
||||
);
|
||||
}
|
||||
|
||||
await Promise.all(requests);
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { ptr, dlopen, CString, toBuffer } from "bun:ffi";
|
||||
import { run, bench, group } from "mitata";
|
||||
|
||||
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");
|
||||
const { napiNoop, napiHash, napiString } = require(import.meta.dir +
|
||||
"/src/ffi_napi_bench.node");
|
||||
|
||||
const {
|
||||
symbols: {
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";
|
||||
|
||||
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
|
||||
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;
|
||||
const path = new URL(
|
||||
"src/target/release/libffi_napi_bench." + extension,
|
||||
import.meta.url,
|
||||
).pathname;
|
||||
|
||||
const {
|
||||
symbols: { ffi_noop, ffi_hash, ffi_string },
|
||||
|
||||
Binary file not shown.
@@ -29,7 +29,7 @@ loop:
|
||||
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
|
||||
-e 'end tell'
|
||||
sleep 0.5
|
||||
cd src; zig run -Doptimize=ReleaseFast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
|
||||
cd src; zig run -Drelease-fast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
|
||||
cp src/colors.css.blob $(PROJECT)/colors.css.blob
|
||||
|
||||
loop-emotion:
|
||||
@@ -40,7 +40,7 @@ loop-emotion:
|
||||
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
|
||||
-e 'end tell'
|
||||
sleep 0.5
|
||||
cd src; zig run -Doptimize=ReleaseFast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
|
||||
cd src; zig run -Drelease-fast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
|
||||
cp src/css-in-js-styles.tsx.blob $(PROJECT)/css-in-js-styles.blob
|
||||
|
||||
process_video:
|
||||
|
||||
@@ -24,7 +24,7 @@ if (process.env.PROJECT === "bun") {
|
||||
|
||||
// bunProcess.stderr.pipe(process.stderr);
|
||||
// bunProcess.stdout.pipe(process.stdout);
|
||||
bunProcess.once("error", err => {
|
||||
bunProcess.once("error", (err) => {
|
||||
console.error("❌ bun error", err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -32,15 +32,19 @@ if (process.env.PROJECT === "bun") {
|
||||
bunProcess?.kill(0);
|
||||
});
|
||||
} else if (process.env.PROJECT === "next") {
|
||||
const bunProcess = child_process.spawn("./node_modules/.bin/next", ["--port", "8080"], {
|
||||
cwd: process.cwd(),
|
||||
stdio: "ignore",
|
||||
env: {
|
||||
...process.env,
|
||||
},
|
||||
const bunProcess = child_process.spawn(
|
||||
"./node_modules/.bin/next",
|
||||
["--port", "8080"],
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
stdio: "ignore",
|
||||
env: {
|
||||
...process.env,
|
||||
},
|
||||
|
||||
shell: false,
|
||||
});
|
||||
shell: false,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const delay = new Promise((resolve, reject) => {
|
||||
@@ -107,7 +111,7 @@ async function main() {
|
||||
return runPage();
|
||||
}
|
||||
|
||||
main().catch(error =>
|
||||
main().catch((error) =>
|
||||
setTimeout(() => {
|
||||
throw error;
|
||||
}),
|
||||
|
||||
Binary file not shown.
@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -125,30 +125,30 @@ pub fn main() anyerror!void {
);
};

counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();

all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}
@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -112,30 +112,30 @@ pub fn main() anyerror!void {
\\
++ SIMULATE_LONG_FILE;

counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();

all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}
@@ -4,7 +4,9 @@ const path = require("path");
const PROJECT = process.env.PROJECT || "bun";
const percentile = require("percentile");
const PACKAGE_NAME = process.env.PACKAGE_NAME;
const label = `${PACKAGE_NAME}@${require(PACKAGE_NAME + "/package.json").version}`;
const label = `${PACKAGE_NAME}@${
require(PACKAGE_NAME + "/package.json").version
}`;

const BASEFOLDER = path.resolve(PROJECT);
const OUTFILE = path.join(process.cwd(), process.env.OUTFILE);
@@ -18,10 +20,10 @@ const TOTAL_FRAMES = VALID_TIMES.length;
const timings = fs
.readFileSync(BASEFOLDER + "/frames.all.clean", "utf8")
.split("\n")
.map(a => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter(a => parseInt(a, 10))
.filter(a => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map(num => BigInt(num));
.map((a) => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter((a) => parseInt(a, 10))
.filter((a) => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map((num) => BigInt(num));

timings.sort();

@@ -45,7 +47,7 @@ const report = {
name: PACKAGE_NAME,
version: require(PACKAGE_NAME + "/package.json").version,
},
timestamps: timings.map(a => Number(a)),
timestamps: timings.map((a) => Number(a)),
frameTimes: frameTime,
percentileMs: {
50: percentile(50, frameTime) / 10,
@@ -65,7 +67,9 @@ fs.writeFileSync(
"." +
process.env.SLEEP_INTERVAL +
"ms." +
`${process.platform}-${process.arch === "arm64" ? "aarch64" : process.arch}` +
`${process.platform}-${
process.arch === "arm64" ? "aarch64" : process.arch
}` +
".json",
),
JSON.stringify(report, null, 2),
@@ -95,5 +99,9 @@ console.log(
timings.length,
"/",
TOTAL_FRAMES,
"(" + Math.round(Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100) + "%)",
"(" +
Math.round(
Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100,
) +
"%)",
);
@@ -3,7 +3,10 @@ import classNames from "classnames";
import ReactDOM from "react-dom";

const Base = ({}) => {
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;
const name =
typeof location !== "undefined"
? decodeURIComponent(location.search.substring(1))
: null;
return <Main productName={name} />;
};
@@ -4,8 +4,8 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<header>
<div className="Title">CSS HMR Stress Test!</div>
<p className="Description">
This page visually tests how quickly a bundler can update {props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot
Module Reloading.
This page visually tests how quickly a bundler can update{" "}
{props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot Module Reloading.
</p>
</header>
<main className="main">
@@ -19,7 +19,9 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="ProgressBar-container">
<div className="ProgressBar"></div>
</div>
<div className="SectionLabel">The progress bar should move from left to right smoothly.</div>
<div className="SectionLabel">
The progress bar should move from left to right smoothly.
</div>
</section>

<section>
@@ -40,15 +42,21 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="Spinner"></div>
</div>
</div>
<div className="SectionLabel">The spinners should rotate & change color smoothly.</div>
<div className="SectionLabel">
The spinners should rotate & change color smoothly.
</div>
</section>
</main>
<footer>
<div className="SectionLabel FooterLabel">There are no CSS animations on this page.</div>
<div className="SectionLabel FooterLabel">
There are no CSS animations on this page.
</div>

<div className="Bundler-container">
<div className="Bundler">{props.productName}</div>
<div className="Bundler-updateRate">{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}</div>
<div className="Bundler-updateRate">
{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}
</div>
</div>
</footer>
</>
@@ -5,4 +5,4 @@
"jsx": "react-jsx",
"paths": {}
}
}
}
@@ -1,4 +0,0 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
extends: ["@remix-run/eslint-config", "@remix-run/eslint-config/node"],
};
10 bench/install/.gitignore vendored
@@ -1,10 +0,0 @@
node_modules

/.cache
/build
/public/build
.env
package-lock.json
yarn.lock
pnpm-lock.yaml
bun.lockb
@@ -1,18 +0,0 @@
# `install` benchmark

Requires [`hyperfine`](https://github.com/sharkdp/hyperfine)

```
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```

To check that the app is working as expected:

```
$ bun run dev
$ npm run dev
$ yarn dev
$ pnpm dev
```

Then visit [http://localhost:3000](http://localhost:3000).
@@ -1,18 +0,0 @@
/**
* By default, Remix will handle hydrating your app on the client for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.client
*/

import { RemixBrowser } from "@remix-run/react";
import { startTransition, StrictMode } from "react";
import { hydrateRoot } from "react-dom/client";

startTransition(() => {
hydrateRoot(
document,
<StrictMode>
<RemixBrowser />
</StrictMode>,
);
});
@@ -1,101 +0,0 @@
/**
* By default, Remix will handle generating the HTTP Response for you.
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
*/

import { PassThrough } from "node:stream";
import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { renderToPipeableStream } from "react-dom/server";

const ABORT_DELAY = 5_000;

export default function handleRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return isbot(request.headers.get("user-agent"))
? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
: handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
}

function handleBotRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onAllReady() {
const body = new PassThrough();

responseHeaders.set("Content-Type", "text/html");

resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);

pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
responseStatusCode = 500;
console.error(error);
},
},
);

setTimeout(abort, ABORT_DELAY);
});
}

function handleBrowserRequest(
request: Request,
responseStatusCode: number,
responseHeaders: Headers,
remixContext: EntryContext,
) {
return new Promise((resolve, reject) => {
const { pipe, abort } = renderToPipeableStream(
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
{
onShellReady() {
const body = new PassThrough();

responseHeaders.set("Content-Type", "text/html");

resolve(
new Response(body, {
headers: responseHeaders,
status: responseStatusCode,
}),
);

pipe(body);
},
onShellError(error: unknown) {
reject(error);
},
onError(error: unknown) {
console.error(error);
responseStatusCode = 500;
},
},
);

setTimeout(abort, ABORT_DELAY);
});
}
@@ -1,20 +0,0 @@
import { Links, LiveReload, Meta, Outlet, Scripts, ScrollRestoration } from "@remix-run/react";

export default function App() {
return (
<html lang="en">
<head>
<meta charSet="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<Meta />
<Links />
</head>
<body>
<Outlet />
<ScrollRestoration />
<Scripts />
<LiveReload />
</body>
</html>
);
}
@@ -1,30 +0,0 @@
import type { V2_MetaFunction } from "@remix-run/node";

export const meta: V2_MetaFunction = () => {
return [{ title: "New Remix App" }];
};

export default function Index() {
return (
<div style={{ fontFamily: "system-ui, sans-serif", lineHeight: "1.4" }}>
<h1>Welcome to Remix</h1>
<ul>
<li>
<a target="_blank" href="https://remix.run/tutorials/blog" rel="noreferrer">
15m Quickstart Blog Tutorial
</a>
</li>
<li>
<a target="_blank" href="https://remix.run/tutorials/jokes" rel="noreferrer">
Deep Dive Jokes App Tutorial
</a>
</li>
<li>
<a target="_blank" href="https://remix.run/docs" rel="noreferrer">
Remix Docs
</a>
</li>
</ul>
</div>
);
}
@@ -1,31 +0,0 @@
{
"private": true,
"sideEffects": false,
"scripts": {
"build": "remix build",
"dev": "remix dev",
"start": "remix-serve build",
"typecheck": "tsc",
"clean": "rm -rf node_modules",
"bench": "hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'"
},
"dependencies": {
"@remix-run/node": "^1.15.0",
"@remix-run/react": "^1.15.0",
"@remix-run/serve": "^1.15.0",
"isbot": "^3.6.5",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@remix-run/dev": "^1.15.0",
"@remix-run/eslint-config": "^1.15.0",
"@types/react": "^18.0.25",
"@types/react-dom": "^18.0.8",
"eslint": "^8.27.0",
"typescript": "^4.8.4"
},
"engines": {
"node": ">=14"
}
}
Binary file not shown.
@@ -1,14 +0,0 @@
/** @type {import('@remix-run/dev').AppConfig} */
module.exports = {
ignoredRouteFiles: ["**/.*"],
// appDirectory: "app",
// assetsBuildDirectory: "public/build",
// serverBuildPath: "build/index.js",
// publicPath: "/build/",
future: {
v2_errorBoundary: true,
v2_meta: true,
v2_normalizeFormMethod: true,
v2_routeConvention: true,
},
};
2 bench/install/remix.env.d.ts vendored
@@ -1,2 +0,0 @@
/// <reference types="@remix-run/dev" />
/// <reference types="@remix-run/node" />
@@ -1,22 +0,0 @@
{
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ES2019"],
"isolatedModules": true,
"esModuleInterop": true,
"jsx": "react-jsx",
"moduleResolution": "node",
"resolveJsonModule": true,
"target": "ES2019",
"strict": true,
"allowJs": true,
"forceConsistentCasingInFileNames": true,
"baseUrl": ".",
"paths": {
"~/*": ["./app/*"]
},

// Remix takes care of building everything in `remix build`.
"noEmit": true
}
}
@@ -1,8 +1,12 @@
import { bench, run } from "mitata";

bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));
bench("JSON.stringify({hello: 'world'})", () =>
JSON.stringify({ hello: "world" }),
);

const otherUint8Array = new Uint8Array(1024);
bench("Uint8Array.from(otherUint8Array)", () => Uint8Array.from(otherUint8Array));
bench("Uint8Array.from(otherUint8Array)", () =>
Uint8Array.from(otherUint8Array),
);

run();
@@ -1,5 +1,7 @@
import { bench, run } from "mitata";

bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
bench("console.log({ hello: 'object' })", () =>
console.log({ hello: "object" }),
);
await run();
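Both hunks above touch files that follow the same small mitata pattern: register benchmarks with `bench(name, fn)`, then execute them with `run()`. A hedged, standalone sketch of that pattern (the benchmark bodies here are invented for illustration):

```js
import { bench, run } from "mitata";

// Each bench() call registers a named micro-benchmark.
bench("JSON.parse('{}')", () => JSON.parse("{}"));
bench("Array.from({ length: 16 })", () => Array.from({ length: 16 }));

// run() executes everything that was registered and prints a report.
await run();
```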
@@ -47,7 +47,11 @@ fs.writeFileSync(
output + `/file${count}.mjs`,
`
export const THE_END = true;
${saveStack ? `globalThis.evaluationOrder.push("${output}/file${count}.mjs");` : ""}
${
saveStack
? `globalThis.evaluationOrder.push("${output}/file${count}.mjs");`
: ""
}
`,
"utf8",
);
@@ -56,7 +60,11 @@ fs.writeFileSync(
output + `/file${count}.js`,
`
module.exports.THE_END = true;
${saveStack ? `globalThis.evaluationOrder.push("${output}/file${count}.js");` : ""}
${
saveStack
? `globalThis.evaluationOrder.push("${output}/file${count}.js");`
: ""
}
`,
"utf8",
);
@@ -1,24 +1,5 @@
import { bench, run } from "mitata";
import {
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
homedir,
hostname,
loadavg,
platform,
release,
setPriority,
tmpdir,
type,
userInfo,
version,
} from "node:os";
import { cpus, endianness, arch, uptime, networkInterfaces, getPriority, totalmem, freemem, homedir, hostname, loadavg, platform, release, setPriority, tmpdir, type, userInfo, version } from "node:os";

bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());
@@ -1,24 +1,5 @@
import { bench, run } from "mitata";
import {
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
homedir,
hostname,
loadavg,
platform,
release,
setPriority,
tmpdir,
type,
userInfo,
version,
} from "node:os";
import { cpus, endianness, arch, uptime, networkInterfaces, getPriority, totalmem, freemem, homedir, hostname, loadavg, platform, release, setPriority, tmpdir, type, userInfo, version } from "node:os";

bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());
1986 bench/package-lock.json generated Normal file
File diff suppressed because it is too large
@@ -1,13 +1,11 @@
{
"name": "bench",
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"mitata": "^0.1.6",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"mitata": "^0.1.6"
"@swc/core": "^1.2.133",
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7"
},
"scripts": {
"ffi": "cd ffi && bun run deps && bun run build && bun run bench",
@@ -19,6 +17,5 @@
},
"devDependencies": {
"fast-deep-equal": "^3.1.3"
},
"prettier": "../.prettierrc.cjs"
}
}
Binary file not shown.
@@ -1,16 +0,0 @@
{
"name": "react-hello-world",
"version": "1.0.0",
"description": "",
"main": "react-hello-world.node.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Colin McDonnell",
"license": "ISC",
"dependencies": {
"react": "next",
"react-dom": "next"
}
}
@@ -18,8 +18,8 @@ const headers = {
};

Deno.serve(
async req => {
async (req) => {
return new Response(await renderToReadableStream(<App />), headers);
},
{ port: 8080 },
{ port: 8080 }
);
@@ -1,9 +1,10 @@
// to run this:
// NODE_ENV=production bun react-hello-world.jsx
// bun --jsx-production react-hello-world.jsx

// Make sure you're using react-dom@18.3.0 or later.
// Currently that is available at react-dom@next (which is installed in this repository)
import { renderToReadableStream } from "react-dom/server";
// This will become the official react-dom/server.bun build a little later
// It will be the default when you import from "react-dom/server"
// That will work via the "bun" package.json export condition (which bun already supports)
import { renderToReadableStream } from "../../test/bun.js/react-dom-server.bun";
const headers = {
headers: {
"Content-Type": "text/html",
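The hunk above only swaps which `renderToReadableStream` build the hello-world server imports; the server itself is outside the diff. A hedged sketch of how such a server is commonly wired up when running under Bun (the `App` component and the port are assumptions):

```jsx
import { renderToReadableStream } from "react-dom/server";

const headers = {
  headers: {
    "Content-Type": "text/html",
  },
};

const App = () => <h1>Hello world</h1>; // placeholder component

Bun.serve({
  port: 3000, // assumed port
  async fetch(req) {
    return new Response(await renderToReadableStream(<App />), headers);
  },
});
```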
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff.