mirror of
https://github.com/oven-sh/bun
synced 2026-02-19 23:31:45 +00:00
Compare commits
3 Commits
bun-v0.0.8
...
jarred/liv
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
62fb5ea9e3 | ||
|
|
a6ec858750 | ||
|
|
ba100fa9dc |
@@ -1,65 +0,0 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-dockerfile
|
||||
{
|
||||
"name": "bun (Ubuntu)",
|
||||
|
||||
// Sets the run context to one level up instead of the .devcontainer folder.
|
||||
"context": "..",
|
||||
|
||||
// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
|
||||
"dockerFile": "../Dockerfile",
|
||||
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"terminal.integrated.shell.linux": "/bin/zsh",
|
||||
"zigLanguageClient.path": "/home/ubuntu/zls/zig-out/bin/zls",
|
||||
"zig.zigPath": "/build/zig/zig",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"AugusteRame.zls-vscode",
|
||||
"ms-vscode.cpptools",
|
||||
"/home/ubuntu/vscode-zig.vsix",
|
||||
"vadimcn.vscode-lldb",
|
||||
"esbenp.prettier-vscode",
|
||||
"xaver.clang-format"
|
||||
],
|
||||
"postCreateCommand": "cd /build/bun; bash /build/getting-started.sh; zsh",
|
||||
|
||||
"build": {
|
||||
"target": "bun.devcontainer",
|
||||
"cacheFrom": ["bun.devcontainer:latest"],
|
||||
"args": {}
|
||||
},
|
||||
"runArgs": [
|
||||
"--ulimit",
|
||||
"memlock=-1:-1",
|
||||
"--ulimit",
|
||||
"nofile=65536:65536",
|
||||
"--cap-add=SYS_PTRACE",
|
||||
"--security-opt",
|
||||
"seccomp=unconfined"
|
||||
],
|
||||
"workspaceMount": "source=bun,target=/build/bun,type=volume",
|
||||
"workspaceFolder": "/build/bun",
|
||||
"mounts": [
|
||||
"source=bun-install,target=/home/ubuntu/.bun,type=volume",
|
||||
"source=bun-config,target=/home/ubuntu/.config,type=volume"
|
||||
],
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
"forwardPorts": [3000, 8081, 8080]
|
||||
|
||||
// Uncomment the next line to run commands after the container is created - for example installing curl.
|
||||
// "postCreateCommand": "apt-get update && apt-get install -y curl",
|
||||
|
||||
// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
|
||||
|
||||
// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
|
||||
// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
|
||||
|
||||
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
|
||||
// "remoteUser": "vscode"
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
# /etc/security/limits.conf
|
||||
#
|
||||
#Each line describes a limit for a user in the form:
|
||||
#
|
||||
#<domain> <type> <item> <value>
|
||||
#
|
||||
#Where:
|
||||
#<domain> can be:
|
||||
# - a user name
|
||||
# - a group name, with @group syntax
|
||||
# - the wildcard *, for default entry
|
||||
# - the wildcard %, can be also used with %group syntax,
|
||||
# for maxlogin limit
|
||||
# - NOTE: group and wildcard limits are not applied to root.
|
||||
# To apply a limit to the root user, <domain> must be
|
||||
# the literal username root.
|
||||
#
|
||||
#<type> can have the two values:
|
||||
# - "soft" for enforcing the soft limits
|
||||
# - "hard" for enforcing hard limits
|
||||
#
|
||||
#<item> can be one of the following:
|
||||
# - core - limits the core file size (KB)
|
||||
# - data - max data size (KB)
|
||||
# - fsize - maximum filesize (KB)
|
||||
# - memlock - max locked-in-memory address space (KB)
|
||||
# - nofile - max number of open file descriptors
|
||||
# - rss - max resident set size (KB)
|
||||
# - stack - max stack size (KB)
|
||||
# - cpu - max CPU time (MIN)
|
||||
# - nproc - max number of processes
|
||||
# - as - address space limit (KB)
|
||||
# - maxlogins - max number of logins for this user
|
||||
# - maxsyslogins - max number of logins on the system
|
||||
# - priority - the priority to run user process with
|
||||
# - locks - max number of file locks the user can hold
|
||||
# - sigpending - max number of pending signals
|
||||
# - msgqueue - max memory used by POSIX message queues (bytes)
|
||||
# - nice - max nice priority allowed to raise to values: [-20, 19]
|
||||
# - rtprio - max realtime priority
|
||||
# - chroot - change root to directory (Debian-specific)
|
||||
#
|
||||
#<domain> <type> <item> <value>
|
||||
#
|
||||
|
||||
* soft memlock 33554432
|
||||
* hard memlock 33554432
|
||||
* soft nofile 33554432
|
||||
* hard nofile 33554432
|
||||
|
||||
#* soft core 0
|
||||
#root hard core 100000
|
||||
#* hard rss 10000
|
||||
#@student hard nproc 20
|
||||
#@faculty soft nproc 20
|
||||
#@faculty hard nproc 50
|
||||
#ftp hard nproc 0
|
||||
#ftp - chroot /ftp
|
||||
#@student - maxlogins 4
|
||||
|
||||
# End of file
|
||||
@@ -1,445 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
|
||||
|
||||
set -e
|
||||
|
||||
INSTALL_ZSH=${1:-"true"}
|
||||
USERNAME=${2:-"automatic"}
|
||||
USER_UID=${3:-"automatic"}
|
||||
USER_GID=${4:-"automatic"}
|
||||
UPGRADE_PACKAGES=${5:-"true"}
|
||||
INSTALL_OH_MYS=${6:-"true"}
|
||||
ADD_NON_FREE_PACKAGES=${7:-"false"}
|
||||
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
|
||||
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
|
||||
rm -f /etc/profile.d/00-restore-env.sh
|
||||
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" >/etc/profile.d/00-restore-env.sh
|
||||
chmod +x /etc/profile.d/00-restore-env.sh
|
||||
|
||||
# If in automatic mode, determine if a user already exists, if not use vscode
|
||||
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
|
||||
USERNAME=""
|
||||
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
|
||||
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
|
||||
if id -u ${CURRENT_USER} >/dev/null 2>&1; then
|
||||
USERNAME=${CURRENT_USER}
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ "${USERNAME}" = "" ]; then
|
||||
USERNAME=vscode
|
||||
fi
|
||||
elif [ "${USERNAME}" = "none" ]; then
|
||||
USERNAME=root
|
||||
USER_UID=0
|
||||
USER_GID=0
|
||||
fi
|
||||
|
||||
# Load markers to see which steps have already run
|
||||
if [ -f "${MARKER_FILE}" ]; then
|
||||
echo "Marker file found:"
|
||||
cat "${MARKER_FILE}"
|
||||
source "${MARKER_FILE}"
|
||||
fi
|
||||
|
||||
# Ensure apt is in non-interactive to avoid prompts
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Function to call apt-get if needed
|
||||
apt_get_update_if_needed() {
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
|
||||
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
|
||||
|
||||
package_list="apt-utils \
|
||||
openssh-client \
|
||||
gnupg2 \
|
||||
dirmngr \
|
||||
iproute2 \
|
||||
procps \
|
||||
lsof \
|
||||
htop \
|
||||
net-tools \
|
||||
psmisc \
|
||||
curl \
|
||||
wget \
|
||||
rsync \
|
||||
ca-certificates \
|
||||
unzip \
|
||||
zip \
|
||||
nano \
|
||||
vim-tiny \
|
||||
less \
|
||||
jq \
|
||||
lsb-release \
|
||||
apt-transport-https \
|
||||
dialog \
|
||||
libc6 \
|
||||
libgcc1 \
|
||||
libkrb5-3 \
|
||||
libgssapi-krb5-2 \
|
||||
libicu[0-9][0-9] \
|
||||
liblttng-ust0 \
|
||||
libstdc++6 \
|
||||
zlib1g \
|
||||
locales \
|
||||
sudo \
|
||||
ncdu \
|
||||
man-db \
|
||||
strace \
|
||||
manpages \
|
||||
manpages-dev \
|
||||
init-system-helpers"
|
||||
|
||||
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
    # Bring in variables from /etc/os-release like VERSION_CODENAME
    . /etc/os-release
    sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
    # Fix: pattern previously said 'httredir' (typo, never matched) and the
    # replacement dropped the '-src' suffix, rewriting deb-src entries as deb.
    sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
    sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
    # Fix: replacement previously rewrote this 'deb-src' entry as a plain 'deb' entry.
    sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
    sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
    sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
    sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
    # Fix: replacement previously rewrote the 'deb-src' backports entry as a plain 'deb' entry.
    sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
    # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
    sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
    sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
    echo "Running apt-get update..."
    apt-get update
    package_list="${package_list} manpages-posix manpages-posix-dev"
else
    apt_get_update_if_needed
fi
|
||||
|
||||
# Install libssl1.1 if available
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
|
||||
package_list="${package_list} libssl1.1"
|
||||
fi
|
||||
|
||||
# Install appropriate version of libssl1.0.x if available
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
# Fix: was "$LIlibssl_packageBSSL" — a botched rename of the old $LIBSSL variable
# (the new name was pasted into the middle of the old one). The expansion was
# always empty, so the "not yet installed" branch was taken unconditionally.
if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
    if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
        # Debian 9
        package_list="${package_list} libssl1.0.2"
    elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
        # Ubuntu 18.04, 16.04, earlier
        package_list="${package_list} libssl1.0.0"
    fi
fi
|
||||
|
||||
echo "Packages to verify are installed: ${package_list}"
|
||||
apt-get -y install --no-install-recommends ${package_list} 2> >(grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2)
|
||||
|
||||
# Install git if not already installed (may be more recent than distro version)
|
||||
if ! type git >/dev/null 2>&1; then
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
PACKAGES_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Get to latest versions of all packages
|
||||
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y upgrade --no-install-recommends
|
||||
apt-get autoremove -y
|
||||
fi
|
||||
|
||||
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
|
||||
# Common need for both applications and things like the agnoster ZSH theme.
|
||||
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen >/dev/null; then
|
||||
echo "en_US.UTF-8 UTF-8" >>/etc/locale.gen
|
||||
locale-gen
|
||||
LOCALE_ALREADY_SET="true"
|
||||
fi
|
||||
|
||||
# Create or update a non-root user to match UID/GID.
|
||||
group_name="${USERNAME}"
|
||||
if id -u ${USERNAME} >/dev/null 2>&1; then
|
||||
# User exists, update if needed
|
||||
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
|
||||
group_name="$(id -gn $USERNAME)"
|
||||
groupmod --gid $USER_GID ${group_name}
|
||||
usermod --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
|
||||
usermod --uid $USER_UID $USERNAME
|
||||
fi
|
||||
else
|
||||
# Create user
|
||||
if [ "${USER_GID}" = "automatic" ]; then
|
||||
groupadd $USERNAME
|
||||
else
|
||||
groupadd --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" = "automatic" ]; then
|
||||
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
|
||||
else
|
||||
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
|
||||
fi
|
||||
fi
|
||||
|
||||
# Add sudo support for the non-root user
|
||||
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
|
||||
echo $USERNAME ALL=\(root\) NOPASSWD:ALL >/etc/sudoers.d/$USERNAME
|
||||
chmod 0440 /etc/sudoers.d/$USERNAME
|
||||
EXISTING_NON_ROOT_USER="${USERNAME}"
|
||||
fi
|
||||
|
||||
# ** Shell customization section **
|
||||
if [ "${USERNAME}" = "root" ]; then
|
||||
user_rc_path="/root"
|
||||
else
|
||||
user_rc_path="/home/${USERNAME}"
|
||||
fi
|
||||
|
||||
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ]; then
|
||||
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
|
||||
fi
|
||||
|
||||
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ]; then
|
||||
cp /etc/skel/.profile "${user_rc_path}/.profile"
|
||||
fi
|
||||
|
||||
# .bashrc/.zshrc snippet
|
||||
rc_snippet="$(
|
||||
cat <<'EOF'
|
||||
if [ -z "${USER}" ]; then export USER=$(whoami); fi
|
||||
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
|
||||
# Display optional first run image specific notice if configured and terminal is interactive
|
||||
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
|
||||
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
|
||||
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
|
||||
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
|
||||
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
|
||||
fi
|
||||
mkdir -p "$HOME/.config/vscode-dev-containers"
|
||||
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
|
||||
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
|
||||
fi
|
||||
# Set the default git editor if not already set
|
||||
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
|
||||
if [ "${TERM_PROGRAM}" = "vscode" ]; then
|
||||
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
|
||||
export GIT_EDITOR="code-insiders --wait"
|
||||
else
|
||||
export GIT_EDITOR="code --wait"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
EOF
|
||||
)"
|
||||
|
||||
# code shim, it fallbacks to code-insiders if code is not available
|
||||
cat <<'EOF' >/usr/local/bin/code
|
||||
#!/bin/sh
|
||||
get_in_path_except_current() {
|
||||
which -a "$1" | grep -A1 "$0" | grep -v "$0"
|
||||
}
|
||||
code="$(get_in_path_except_current code)"
|
||||
if [ -n "$code" ]; then
|
||||
exec "$code" "$@"
|
||||
elif [ "$(command -v code-insiders)" ]; then
|
||||
exec code-insiders "$@"
|
||||
else
|
||||
echo "code or code-insiders is not installed" >&2
|
||||
exit 127
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/code
|
||||
|
||||
# systemctl shim - tells people to use 'service' if systemd is not running
cat <<'EOF' >/usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
    # Fix: was 'exec /bin/systemctl/systemctl' — that path treats the systemctl
    # binary as a directory and can never resolve, so the shim always failed
    # even when systemd was running.
    exec /bin/systemctl "$@"
else
    echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
|
||||
|
||||
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
|
||||
codespaces_bash="$(
|
||||
cat \
|
||||
<<'EOF'
|
||||
# Codespaces bash prompt theme
|
||||
__bash_prompt() {
|
||||
local userpart='`export XIT=$? \
|
||||
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
|
||||
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
|
||||
local gitbranch='`\
|
||||
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
|
||||
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
|
||||
if [ "${BRANCH}" != "" ]; then \
|
||||
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
|
||||
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
|
||||
echo -n " \[\033[1;33m\]✗"; \
|
||||
fi \
|
||||
&& echo -n "\[\033[0;36m\]) "; \
|
||||
fi; \
|
||||
fi`'
|
||||
local lightblue='\[\033[1;34m\]'
|
||||
local removecolor='\[\033[0m\]'
|
||||
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
|
||||
unset -f __bash_prompt
|
||||
}
|
||||
__bash_prompt
|
||||
EOF
|
||||
)"
|
||||
|
||||
codespaces_zsh="$(
|
||||
cat \
|
||||
<<'EOF'
|
||||
# Codespaces zsh prompt theme
|
||||
__zsh_prompt() {
|
||||
local prompt_username
|
||||
if [ ! -z "${GITHUB_USER}" ]; then
|
||||
prompt_username="@${GITHUB_USER}"
|
||||
else
|
||||
prompt_username="%n"
|
||||
fi
|
||||
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
|
||||
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
|
||||
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
|
||||
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
|
||||
unset -f __zsh_prompt
|
||||
}
|
||||
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
|
||||
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
|
||||
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
|
||||
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
|
||||
__zsh_prompt
|
||||
EOF
|
||||
)"
|
||||
|
||||
# Add RC snippet and custom bash prompt
|
||||
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >>/etc/bash.bashrc
|
||||
echo "${codespaces_bash}" >>"${user_rc_path}/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >>"${user_rc_path}/.bashrc"
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
echo "${codespaces_bash}" >>"/root/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >>"/root/.bashrc"
|
||||
fi
|
||||
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
|
||||
RC_SNIPPET_ALREADY_ADDED="true"
|
||||
fi
|
||||
|
||||
# Optionally install and configure zsh and Oh My Zsh!
|
||||
if [ "${INSTALL_ZSH}" = "true" ]; then
|
||||
if ! type zsh >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get install -y zsh
|
||||
fi
|
||||
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >>/etc/zsh/zshrc
|
||||
ZSH_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
|
||||
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
|
||||
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
|
||||
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
|
||||
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
|
||||
user_rc_file="${user_rc_path}/.zshrc"
|
||||
umask g-w,o-w
|
||||
mkdir -p ${oh_my_install_dir}
|
||||
git clone --depth=1 \
|
||||
-c core.eol=lf \
|
||||
-c core.autocrlf=false \
|
||||
-c fsck.zeroPaddedFilemode=ignore \
|
||||
-c fetch.fsck.zeroPaddedFilemode=ignore \
|
||||
-c receive.fsck.zeroPaddedFilemode=ignore \
|
||||
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
|
||||
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" >${user_rc_file}
|
||||
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
|
||||
|
||||
mkdir -p ${oh_my_install_dir}/custom/themes
|
||||
echo "${codespaces_zsh}" >"${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
|
||||
# Shrink git while still enabling updates
|
||||
cd "${oh_my_install_dir}"
|
||||
git repack -a -d -f --depth=1 --window=1
|
||||
# Copy to non-root user if one is specified
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
|
||||
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Persist image metadata info, script if meta.env found in same directory
|
||||
meta_info_script="$(
|
||||
cat <<'EOF'
|
||||
#!/bin/sh
|
||||
. /usr/local/etc/vscode-dev-containers/meta.env
|
||||
# Minimal output
|
||||
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
|
||||
echo "${VERSION}"
|
||||
exit 0
|
||||
elif [ "$1" = "release" ]; then
|
||||
echo "${GIT_REPOSITORY_RELEASE}"
|
||||
exit 0
|
||||
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
|
||||
echo "${CONTENTS_URL}"
|
||||
exit 0
|
||||
fi
|
||||
#Full output
|
||||
echo
|
||||
echo "Development container image information"
|
||||
echo
|
||||
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
|
||||
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
|
||||
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
|
||||
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
|
||||
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
|
||||
echo
|
||||
EOF
|
||||
)"
|
||||
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
|
||||
mkdir -p /usr/local/etc/vscode-dev-containers/
|
||||
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
|
||||
echo "${meta_info_script}" >/usr/local/bin/devcontainer-info
|
||||
chmod +x /usr/local/bin/devcontainer-info
|
||||
fi
|
||||
|
||||
# Write marker file
|
||||
mkdir -p "$(dirname "${MARKER_FILE}")"
|
||||
echo -e "\
|
||||
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
|
||||
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
|
||||
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
|
||||
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
|
||||
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" >"${MARKER_FILE}"
|
||||
|
||||
echo "Done!"
|
||||
@@ -1,16 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "To get started, login to GitHub and clone bun's GitHub repo into /workspaces/bun"
|
||||
echo "Make sure to login with a Personal Access Token"
|
||||
echo "# First time setup"
|
||||
echo "gh auth login"
|
||||
echo "gh repo clone Jarred-Sumner/bun . -- --depth=1 --progress -j8"
|
||||
echo ""
|
||||
echo "# Compile bun dependencies (zig is already compiled)"
|
||||
echo "make devcontainer"
|
||||
echo ""
|
||||
echo "# Build bun for development"
|
||||
echo "make dev"
|
||||
echo ""
|
||||
echo "# Run bun"
|
||||
echo "bun-debug"
|
||||
@@ -1,185 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./github-debian.sh [version]
|
||||
|
||||
CLI_VERSION=${1:-"latest"}
|
||||
|
||||
GITHUB_CLI_ARCHIVE_GPG_KEY=C99B11DEB97541F0
|
||||
GPG_KEY_SERVERS="keyserver hkp://keyserver.ubuntu.com:80
|
||||
keyserver hkps://keys.openpgp.org
|
||||
keyserver hkp://keyserver.pgp.com"
|
||||
|
||||
set -e
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get central common setting
|
||||
get_common_setting() {
|
||||
if [ "${common_settings_file_loaded}" != "true" ]; then
|
||||
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" -o /tmp/vsdc-settings.env 2>/dev/null || echo "Could not download settings file. Skipping."
|
||||
common_settings_file_loaded=true
|
||||
fi
|
||||
if [ -f "/tmp/vsdc-settings.env" ]; then
|
||||
local multi_line=""
|
||||
if [ "$2" = "true" ]; then multi_line="-z"; fi
|
||||
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
|
||||
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
|
||||
fi
|
||||
echo "$1=${!1}"
|
||||
}
|
||||
|
||||
# Import the GPG key whose ID is stored in the variable named by the first argument
|
||||
receive_gpg_keys() {
|
||||
get_common_setting $1
|
||||
local keys=${!1}
|
||||
get_common_setting GPG_KEY_SERVERS true
|
||||
|
||||
# Use a temporary location for gpg keys to avoid polluting image
|
||||
export GNUPGHOME="/tmp/tmp-gnupg"
|
||||
mkdir -p ${GNUPGHOME}
|
||||
chmod 700 ${GNUPGHOME}
|
||||
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
|
||||
# GPG key download sometimes fails for some reason and retrying fixes it.
|
||||
local retry_count=0
|
||||
local gpg_ok="false"
|
||||
set +e
|
||||
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
|
||||
echo "(*) Downloading GPG key..."
|
||||
(echo "${keys}" | xargs -n 1 gpg --recv-keys) 2>&1 && gpg_ok="true"
|
||||
if [ "${gpg_ok}" != "true" ]; then
|
||||
echo "(*) Failed getting key, retring in 10s..."
|
||||
((retry_count++))
|
||||
sleep 10s
|
||||
fi
|
||||
done
|
||||
set -e
|
||||
if [ "${gpg_ok}" = "false" ]; then
|
||||
echo "(!) Failed to get gpg key."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Figure out the correct version if a three-part version number is not passed
|
||||
find_version_from_git_tags() {
|
||||
local variable_name=$1
|
||||
local requested_version=${!variable_name}
|
||||
if [ "${requested_version}" = "none" ]; then return; fi
|
||||
local repository=$2
|
||||
local prefix=${3:-"tags/v"}
|
||||
local separator=${4:-"."}
|
||||
local last_part_optional=${5:-"false"}
|
||||
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
|
||||
local escaped_separator=${separator//./\\.}
|
||||
local last_part
|
||||
if [ "${last_part_optional}" = "true" ]; then
|
||||
last_part="(${escaped_separator}[0-9]+)?"
|
||||
else
|
||||
last_part="${escaped_separator}[0-9]+"
|
||||
fi
|
||||
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
|
||||
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
|
||||
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
|
||||
else
|
||||
set +e
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
|
||||
set -e
|
||||
fi
|
||||
fi
|
||||
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" >/dev/null 2>&1; then
|
||||
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "${variable_name}=${!variable_name}"
|
||||
}
|
||||
|
||||
# Import the GPG key whose ID is stored in the variable named by the first argument
|
||||
receive_gpg_keys() {
|
||||
get_common_setting $1
|
||||
local keys=${!1}
|
||||
get_common_setting GPG_KEY_SERVERS true
|
||||
local keyring_args=""
|
||||
if [ ! -z "$2" ]; then
|
||||
keyring_args="--no-default-keyring --keyring $2"
|
||||
fi
|
||||
|
||||
# Use a temporary location for gpg keys to avoid polluting image
|
||||
export GNUPGHOME="/tmp/tmp-gnupg"
|
||||
mkdir -p ${GNUPGHOME}
|
||||
chmod 700 ${GNUPGHOME}
|
||||
echo -e "disable-ipv6\n${GPG_KEY_SERVERS}" >${GNUPGHOME}/dirmngr.conf
|
||||
# GPG key download sometimes fails for some reason and retrying fixes it.
|
||||
local retry_count=0
|
||||
local gpg_ok="false"
|
||||
set +e
|
||||
until [ "${gpg_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; do
|
||||
echo "(*) Downloading GPG key..."
|
||||
(echo "${keys}" | xargs -n 1 gpg -q ${keyring_args} --recv-keys) 2>&1 && gpg_ok="true"
|
||||
if [ "${gpg_ok}" != "true" ]; then
|
||||
echo "(*) Failed getting key, retring in 10s..."
|
||||
((retry_count++))
|
||||
sleep 10s
|
||||
fi
|
||||
done
|
||||
set -e
|
||||
if [ "${gpg_ok}" = "false" ]; then
|
||||
echo "(!) Failed to get gpg key."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to run apt-get if needed
|
||||
apt_get_update_if_needed() {
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Checks if packages are installed and installs them if not
|
||||
check_packages() {
|
||||
if ! dpkg -s "$@" >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install curl, apt-transport-https, curl, gpg, or dirmngr, git if missing
|
||||
check_packages curl ca-certificates apt-transport-https dirmngr gnupg2
|
||||
if ! type git >/dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
# Soft version matching
|
||||
if [ "${CLI_VERSION}" != "latest" ] && [ "${CLI_VERSION}" != "lts" ] && [ "${CLI_VERSION}" != "stable" ]; then
|
||||
find_version_from_git_tags CLI_VERSION "https://github.com/cli/cli"
|
||||
version_suffix="=${CLI_VERSION}"
|
||||
else
|
||||
version_suffix=""
|
||||
fi
|
||||
|
||||
# Install the GitHub CLI
|
||||
echo "Downloading github CLI..."
|
||||
# Import key safely (new method rather than deprecated apt-key approach) and install
|
||||
. /etc/os-release
|
||||
receive_gpg_keys GITHUB_CLI_ARCHIVE_GPG_KEY /usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages ${VERSION_CODENAME} main" >/etc/apt/sources.list.d/github-cli.list
|
||||
apt-get update
|
||||
apt-get -y install "gh${version_suffix}"
|
||||
rm -rf "/tmp/gh/gnupg"
|
||||
echo "Done!"
|
||||
@@ -1,7 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
chsh -s $(which zsh)
|
||||
sh -c "$(curl -fsSL https://starship.rs/install.sh) -- --platform linux_musl"
|
||||
echo "eval \"$(starship init zsh)\"" >>~/.zshrc
|
||||
|
||||
curl https://github.com/Jarred-Sumner/vscode-zig/releases/download/fork-v1/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
|
||||
@@ -1,7 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
curl -L https://github.com/Jarred-Sumner/vscode-zig/releases/download/fork-v1/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
|
||||
git clone https://github.com/zigtools/zls /home/ubuntu/zls
|
||||
cd /home/ubuntu/zls
|
||||
git submodule update --init --recursive --progress --depth=1
|
||||
zig build -Drelease-fast
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"folders": [
|
||||
{
|
||||
// Source code
|
||||
"name": "bun",
|
||||
"path": "bun"
|
||||
},
|
||||
]
|
||||
}
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"zig_exe_path": "/build/zig/zig",
|
||||
"enable_snippets": true,
|
||||
"warn_style": false,
|
||||
"enable_semantic_tokens": true,
|
||||
"operator_completions": true,
|
||||
"include_at_in_builtins": false,
|
||||
"max_detail_length": 1048576
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
docker login --username bunbunbunbun
|
||||
|
||||
docker build -f Dockerfile.base -t bunbunbunbun/bun-test-base --target bun-test-base . --platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH
|
||||
docker build -f Dockerfile.base -t bunbunbunbun/bun-base-with-zig-and-webkit --target bun-base-with-zig-and-webkit . --platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH
|
||||
docker build -f Dockerfile.base -t bunbunbunbun/bun-base --target bun-base --platform=linux/$BUILDARCH . --build-arg BUILDARCH=$BUILDARCH
|
||||
|
||||
docker push bunbunbunbun/bun-test-base:latest
|
||||
docker push bunbunbunbun/bun-base-with-zig-and-webkit:latest
|
||||
docker push bunbunbunbun/bun-base:latest
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
docker buildx build \
|
||||
-t bunbunbunbun/bun-test-base:latest -f Dockerfile.base \
|
||||
--target bun-test-base \
|
||||
--platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH .
|
||||
docker buildx build \
|
||||
--target bun-base \
|
||||
-f Dockerfile.base \
|
||||
-t bunbunbunbun/bun-base:latest --platform=linux/$BUILDARCH \
|
||||
--build-arg BUILDARCH=$BUILDARCH .
|
||||
docker buildx build \
|
||||
-t bunbunbunbun/bun-base-with-zig-and-webkit:latest \
|
||||
-f Dockerfile.base \
|
||||
--target bun-base-with-zig-and-webkit \
|
||||
--platform=linux/$BUILDARCH --build-arg BUILDARCH=$BUILDARCH .
|
||||
|
||||
docker push bunbunbunbun/bun-test-base:latest
|
||||
docker push bunbunbunbun/bun-base:latest
|
||||
docker push bunbunbunbun/bun-base-with-zig-and-webkit:latest
|
||||
1539
.docker/chrome.json
1539
.docker/chrome.json
File diff suppressed because it is too large
Load Diff
@@ -1,14 +0,0 @@
|
||||
# Note: 2 blank lines are required between entries
|
||||
Package: *
|
||||
Pin: release a=eoan
|
||||
Pin-Priority: 500
|
||||
|
||||
Package: *
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 300
|
||||
|
||||
# Pattern includes 'chromium', 'chromium-browser' and similarly
|
||||
# named dependencies:
|
||||
Package: chromium*
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 700
|
||||
@@ -1,8 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
name=$(openssl rand -hex 12)
|
||||
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
|
||||
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
|
||||
echo -e "bun-binary-$name"
|
||||
@@ -1,3 +0,0 @@
|
||||
deb http://deb.debian.org/debian buster main
|
||||
deb http://deb.debian.org/debian buster-updates main
|
||||
deb http://deb.debian.org/debian-security buster/updates main
|
||||
@@ -1,34 +0,0 @@
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
export BUILDKIT_ARCH=$(uname -m)
|
||||
export ARCH=${BUILDKIT_ARCH}
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
|
||||
echo "Unsupported platform: $BUILDKIT_ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export BUILD_ID=$(cat build-id)
|
||||
export CONTAINER_NAME=bun-linux-$ARCH
|
||||
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
|
||||
export TEMP=/tmp/bun-0.0.$BUILD_ID
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
|
||||
|
||||
docker tag bunbunbunbun/bun-test-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest
|
||||
@@ -1,47 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
source "dockerfile-common.sh"
|
||||
|
||||
export $CONTAINER_NAME=$CONTAINER_NAME-local
|
||||
|
||||
rm -rf $TEMP
|
||||
mkdir -p $TEMP
|
||||
|
||||
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
|
||||
|
||||
if (($?)); then
|
||||
echo "Failed to build container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
id=$(docker create $CONTAINER_NAME:latest)
|
||||
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
|
||||
if (($?)); then
|
||||
echo "Failed to cp container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd $TEMP
|
||||
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
|
||||
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
|
||||
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
|
||||
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
|
||||
docker rm -v $id
|
||||
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
|
||||
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
|
||||
|
||||
case $(uname -s) in
|
||||
"Linux") target="linux" ;;
|
||||
*) target="other" ;;
|
||||
esac
|
||||
|
||||
if [ "$target" = "linux" ]; then
|
||||
if command -v bun --version >/dev/null; then
|
||||
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
|
||||
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Saved to:"
|
||||
echo $debug_abs
|
||||
echo $abs
|
||||
@@ -1,9 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
bun install
|
||||
bun install --cwd ./integration/snippets
|
||||
bun install --cwd ./integration/scripts
|
||||
|
||||
make $BUN_TEST_NAME
|
||||
@@ -1,5 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest
|
||||
@@ -1,5 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest
|
||||
@@ -10,8 +10,5 @@ examples
|
||||
|
||||
**/.next
|
||||
.git
|
||||
src/javascript/jsc/WebKit
|
||||
**/CMakeCache.txt
|
||||
packages/**/bun
|
||||
packages/**/bun-profile
|
||||
zig-cache
|
||||
src/javascript/jsc/WebKit/WebKitBuild
|
||||
**/CMakeCache.txt
|
||||
6
.gitattributes
vendored
6
.gitattributes
vendored
@@ -1,6 +0,0 @@
|
||||
.vscode/launch.json linguist-generated
|
||||
src/api/schema.d.ts linguist-generated
|
||||
src/api/schema.js linguist-generated
|
||||
src/javascript/jsc/bindings/sqlite/sqlite3.c linguist-vendored
|
||||
src/javascript/jsc/bindings/sqlite/sqlite3_local.h linguist-vendored
|
||||
*.lockb binary diff=lockb
|
||||
6
.github/workflows/bun-framework-next.yml
vendored
6
.github/workflows/bun-framework-next.yml
vendored
@@ -1,12 +1,8 @@
|
||||
name: bun-framework-next
|
||||
name: CI workflow for bun-framework-next
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- packages/bun-framework-next/**/*
|
||||
branches: [main, bun-framework-next-actions]
|
||||
pull_request:
|
||||
paths:
|
||||
- packages/bun-framework-next/**/*
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
|
||||
167
.github/workflows/bun.yml
vendored
167
.github/workflows/bun.yml
vendored
@@ -1,167 +0,0 @@
|
||||
name: bun
|
||||
on:
|
||||
push:
|
||||
branches: [main, bun-actions]
|
||||
paths-ignore:
|
||||
- "examples/**"
|
||||
- "bench/**"
|
||||
- "README.*"
|
||||
- "LICENSE"
|
||||
- ".vscode"
|
||||
- ".devcontainer"
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths-ignore:
|
||||
- "examples/**"
|
||||
- "bench/**"
|
||||
- README.*
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
jobs:
|
||||
e2e:
|
||||
runs-on: self-hosted
|
||||
name: "Integration tests"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Checkout submodules
|
||||
run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to Dockerhub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
- name: Pull Base Image
|
||||
run: bash .docker/pull.sh
|
||||
- name: Build tests
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
target: test_base
|
||||
tags: bun-test:latest
|
||||
load: true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
- name: Run test-with-hmr
|
||||
env:
|
||||
BUN_TEST_NAME: test-with-hmr
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-no-hmr
|
||||
env:
|
||||
BUN_TEST_NAME: test-no-hmr
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-create-next
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-create-next
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-create-react
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-create-react
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-run
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-bun-run
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
- name: Run test-bun-install
|
||||
env:
|
||||
RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
BUN_TEST_NAME: test-bun-install
|
||||
GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
run: bash .docker/runner.sh
|
||||
# This is commented out because zig test does not work on the CI
|
||||
# Which sucks
|
||||
# zig-unit-tests:
|
||||
# runs-on: self-hosted
|
||||
# name: "Unit tests (Zig)"
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v2
|
||||
# - name: Checkout submodules
|
||||
# run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
|
||||
# - name: Set up Docker Buildx
|
||||
# uses: docker/setup-buildx-action@v1
|
||||
# - name: Login to Dockerhub
|
||||
# uses: docker/login-action@v1
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
# password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
# - name: Pull Base Image
|
||||
# run: bash .docker/pull.sh
|
||||
# - name: Build tests
|
||||
# uses: docker/build-push-action@v2
|
||||
# with:
|
||||
# context: .
|
||||
# target: build_unit
|
||||
# tags: bun-unit-tests:latest
|
||||
# load: true
|
||||
# cache-from: type=gha
|
||||
# cache-to: type=gha,mode=max
|
||||
# builder: ${{ steps.buildx.outputs.name }}
|
||||
# - name: Run tests
|
||||
# env:
|
||||
# GITHUB_WORKSPACE: $GITHUB_WORKSPACE
|
||||
# RUNNER_TEMP: ${RUNNER_TEMP}
|
||||
# run: bash .docker/unit-tests.sh
|
||||
release:
|
||||
runs-on: self-hosted
|
||||
needs: ["e2e"]
|
||||
if: github.ref == 'refs/heads/main'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Checkout submodules
|
||||
run: git -c submodule."src/javascript/jsc/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j 8
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: jarredsumner
|
||||
password: ${{ secrets.DOCKERHUB_ALT }}
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
with:
|
||||
install: true
|
||||
- name: Pull Base Image
|
||||
run: bash .docker/pull.sh
|
||||
- name: Build release image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
target: release
|
||||
tags: |
|
||||
ghcr.io/jarred-sumner/bun:${{github.sha}}
|
||||
ghcr.io/jarred-sumner/bun:edge
|
||||
jarredsumner/bun:${{github.sha}}
|
||||
jarredsumner/bun:edge
|
||||
platforms: |
|
||||
linux/amd64
|
||||
labels: |
|
||||
org.opencontainers.image.title=bun
|
||||
org.opencontainers.image.description=bun is a fast bundler, transpiler, JavaScript Runtime environment and package manager for web software. The image is an Ubuntu 20.04 image with bun preinstalled into /opt/bun.
|
||||
org.opencontainers.image.vendor=bun
|
||||
org.opencontainers.image.source=https://github.com/Jarred-Sumner/bun
|
||||
org.opencontainers.image.url=https://bun.sh
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
20
.gitignore
vendored
20
.gitignore
vendored
@@ -12,7 +12,6 @@ yarn.lock
|
||||
dist
|
||||
*.log
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
/package-lock.json
|
||||
build
|
||||
*.wat
|
||||
@@ -78,21 +77,4 @@ misctools/fetch
|
||||
src/deps/libiconv
|
||||
src/deps/openssl
|
||||
src/tests.zig
|
||||
*.blob
|
||||
src/deps/s2n-tls
|
||||
.npm
|
||||
.npm.gz
|
||||
|
||||
bun-binary
|
||||
|
||||
src/deps/PLCrashReporter/
|
||||
|
||||
*.dSYM
|
||||
*.crash
|
||||
misctools/sha
|
||||
packages/bun-wasm/*.mjs
|
||||
packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.d.ts
|
||||
*.bc
|
||||
*.blob
|
||||
34
.gitmodules
vendored
34
.gitmodules
vendored
@@ -5,49 +5,23 @@
|
||||
path = src/deps/picohttpparser
|
||||
url = https://github.com/h2o/picohttpparser.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/javascript/jsc/WebKit"]
|
||||
path = src/javascript/jsc/WebKit
|
||||
url = https://github.com/Jarred-Sumner/WebKit.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/mimalloc"]
|
||||
path = src/deps/mimalloc
|
||||
url = https://github.com/Jarred-Sumner/mimalloc.git
|
||||
url = https://github.com/microsoft/mimalloc.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/zlib"]
|
||||
path = src/deps/zlib
|
||||
url = https://github.com/cloudflare/zlib.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/libarchive"]
|
||||
path = src/deps/libarchive
|
||||
url = https://github.com/libarchive/libarchive.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/boringssl"]
|
||||
path = src/deps/boringssl
|
||||
url = https://github.com/google/boringssl.git
|
||||
[submodule "src/deps/s2n-tls"]
|
||||
path = src/deps/s2n-tls
|
||||
url = https://github.com/Jarred-Sumner/s2n-tls
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/libbacktrace"]
|
||||
path = src/deps/libbacktrace
|
||||
url = https://github.com/ianlancetaylor/libbacktrace
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/lol-html"]
|
||||
path = src/deps/lol-html
|
||||
url = https://github.com/cloudflare/lol-html
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/uws"]
|
||||
path = src/deps/uws
|
||||
url = https://github.com/Jarred-Sumner/uWebSockets
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
[submodule "src/deps/tinycc"]
|
||||
path = src/deps/tinycc
|
||||
url = https://github.com/Jarred-Sumner/tinycc.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euxo pipefail
|
||||
|
||||
WEBKIT_VERSION=$(git rev-parse HEAD:./src/javascript/jsc/WebKit)
|
||||
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
|
||||
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
|
||||
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
|
||||
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
|
||||
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
|
||||
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
|
||||
|
||||
rm -rf src/generated_versions_list.zig
|
||||
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
|
||||
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
|
||||
zig fmt src/generated_versions_list.zig
|
||||
8
.vscode/extensions.json
vendored
8
.vscode/extensions.json
vendored
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"AugusteRame.zls-vscode",
|
||||
"esbenp.prettier-vscode",
|
||||
"xaver.clang-format",
|
||||
"vadimcn.vscode-lldb"
|
||||
]
|
||||
}
|
||||
327
.vscode/launch.json
vendored
327
.vscode/launch.json
vendored
@@ -1,84 +1,12 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "HTTP bench",
|
||||
"program": "${workspaceFolder}/misctools/http_bench",
|
||||
"args": ["https://twitter.com", "--count=100"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://example.com", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #2",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://twitter.com", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #13w",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["http://127.0.0.1:8080/next.json", "--quiet", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #12w",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://registry.npmjs.org/next", "--quiet", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "hop",
|
||||
"program": "hop",
|
||||
"args": ["swc-linux-arm64-musl-12.0.3.tgz"],
|
||||
"cwd": "/Users/jarred/Downloads/hop-test",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "hop extract",
|
||||
"program": "hop",
|
||||
"args": ["swc-linux-arm64-musl-12.0.3.hop"],
|
||||
"cwd": "/Users/jarred/Downloads/hop-test",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #2",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["https://registry.npmjs.org/react", "--verbose"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "fetch debug #3",
|
||||
"program": "${workspaceFolder}/misctools/fetch",
|
||||
"args": ["http://example.com/", "--verbose"],
|
||||
"args": ["https://api.github.com/repos/hanford/trends/tarball"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
@@ -87,17 +15,8 @@
|
||||
"request": "launch",
|
||||
"name": "bun create debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["create", "next", "foo", "--open", "--force"],
|
||||
"cwd": "/tmp",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun dev debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["dev"],
|
||||
"cwd": "/tmp/foo",
|
||||
"args": ["create", "hanford/trends", "foo"],
|
||||
"cwd": "/tmp/",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
@@ -105,9 +24,8 @@
|
||||
"request": "launch",
|
||||
"name": "bun run debug",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "/tmp/bar.js"],
|
||||
// "args": ["--version"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"args": ["paoskdpoasdk"],
|
||||
"cwd": "/tmp/",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
@@ -220,35 +138,46 @@
|
||||
"program": "bun-debug",
|
||||
// "args": ["--serve", "--origin=http://localhost:3000"],
|
||||
"args": ["dev", "--origin=http://localhost:3000"],
|
||||
"cwd": "/tmp/next-app",
|
||||
"cwd": "${workspaceFolder}/examples/hello-next",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Demo ",
|
||||
"program": "bun-debug",
|
||||
"name": "Demo Serve aarch64",
|
||||
"program": "${workspaceFolder}/build/macos-aarch64/bun",
|
||||
// "args": ["--serve", "--origin=http://localhost:3000"],
|
||||
"args": ["/Users/jarred/Desktop/text-encoder-hello.js"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"args": ["dev", "--origin=http://ci.local:3000"],
|
||||
"cwd": "${workspaceFolder}/examples/hello-next",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "wiptest",
|
||||
"name": "Demo Lazy Build",
|
||||
"program": "bun-debug",
|
||||
"args": ["wiptest", "transpiler"],
|
||||
"cwd": "${workspaceFolder}/integration",
|
||||
"args": [
|
||||
"./src/index.tsx",
|
||||
"--resolve=lazy",
|
||||
"--origin=http://localhost:9000/"
|
||||
],
|
||||
"cwd": "${workspaceFolder}/examples/simple-react",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "debug test",
|
||||
"name": "Demo Build",
|
||||
"program": "bun-debug",
|
||||
"args": ["build", "/tmp/foo.ts"],
|
||||
"cwd": "/tmp",
|
||||
"args": [
|
||||
"./src/index.tsx",
|
||||
"--resolve=dev",
|
||||
"--outdir=outcss",
|
||||
"--platform=browser",
|
||||
"--origin=http://localhost:9000/"
|
||||
],
|
||||
"cwd": "${workspaceFolder}/examples/css-stress-test",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
@@ -256,110 +185,19 @@
|
||||
"request": "launch",
|
||||
"name": "Dazzle serve",
|
||||
"program": "bun-debug",
|
||||
"args": ["dev"],
|
||||
"args": ["--origin=http://localhost:5000", "--disable-hmr"],
|
||||
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Dazzle bun",
|
||||
"name": "Bun",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun"],
|
||||
"args": ["bun", "--use=next"],
|
||||
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run",
|
||||
"program": "bun-debug",
|
||||
"args": ["cat.js", "./node_modules/@babel/standalone/babel.js"],
|
||||
"cwd": "/Users/jarred/Build/foobar",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun run callback bench",
|
||||
"program": "bun-debug",
|
||||
"args": ["/Users/jarred/Code/bun/bench/snippets/callbacks-overhead.mjs"],
|
||||
"cwd": "/Users/jarred/Build/foobar",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test",
|
||||
"program": "bun-debug",
|
||||
"args": ["wiptest"],
|
||||
"cwd": "${workspaceFolder}/integration",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun test current",
|
||||
"program": "bun-debug",
|
||||
"args": ["wiptest", "mmap"],
|
||||
"cwd": "${workspaceFolder}/integration",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun http example",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "examples/bun/http.ts"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun http file example",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "examples/bun/http-file.ts"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun html-rewriter example",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "examples/bun/html-rewriter.ts"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun tes2t",
|
||||
"program": "bun-debug",
|
||||
"args": ["add", "imagemin-webpack-plugin"],
|
||||
"cwd": "/tmp/testfoo",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -443,7 +281,7 @@
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Context bun Bug",
|
||||
"name": "Context Bun Bug",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "./code.js"],
|
||||
"cwd": "/Users/jarred/Build/context/www",
|
||||
@@ -452,7 +290,7 @@
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Context bun",
|
||||
"name": "Context Bun",
|
||||
"program": "bun-debug",
|
||||
"args": ["bun", "--use=next"],
|
||||
"cwd": "/Users/jarred/Build/context/www",
|
||||
@@ -461,7 +299,7 @@
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun-hello",
|
||||
"name": "Bun-hello",
|
||||
"program": "bun-debug",
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}/packages/bun-hello",
|
||||
@@ -476,87 +314,6 @@
|
||||
"cwd": "${workspaceFolder}/integration/snippets",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun.sh dev",
|
||||
"program": "bun-debug",
|
||||
"args": ["dev"],
|
||||
"cwd": "${workspaceFolder}/../bun.sh",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Instal 1l",
|
||||
"program": "bun-debug",
|
||||
"args": ["install", "-g", "which"],
|
||||
"cwd": "/tmp/new-app",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Instal 32",
|
||||
"program": "bun-debug",
|
||||
"args": ["install"],
|
||||
"cwd": "/tmp/foo",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Install",
|
||||
"program": "bun-debug",
|
||||
"args": ["install", "--backend=clonefile", "--force"],
|
||||
"cwd": "/Users/jarred/Build/octokit-test",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Install #2",
|
||||
"program": "bun-debug",
|
||||
"args": ["add", "typescript"],
|
||||
"cwd": "/tmp/wow-such-npm",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Add",
|
||||
"program": "bun-debug",
|
||||
"args": ["add", "react"],
|
||||
"cwd": "/tmp/wow-such-npm",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Remove",
|
||||
"program": "bun-debug",
|
||||
"args": ["remove", "foo"],
|
||||
"cwd": "/Users/jarred/Build/athena.yarn",
|
||||
"env": {},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Print Lockfile",
|
||||
"program": "bun-debug",
|
||||
"args": ["./bun.lockb"],
|
||||
"cwd": "/tmp/wow-such-npm",
|
||||
"env": {
|
||||
"BUN_CONFIG_SKIP_SAVE_LOCKFILE": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -575,6 +332,15 @@
|
||||
"cwd": "${workspaceFolder}/src/test/fixtures",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Fixtures Rel",
|
||||
"program": "${workspaceFolder}/build/macos-x86_64/bun",
|
||||
"args": ["dev"],
|
||||
"cwd": "${workspaceFolder}/src/test/fixtures",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
|
||||
@@ -669,6 +435,15 @@
|
||||
"cwd": "${workspaceFolder}/src/test/fixtures",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"name": "esbuild",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "debug",
|
||||
"program": "/Users/jarred/Code/esbuild/cmd/esbuild",
|
||||
"cwd": "/Users/jarred/Code/bun/src/test/fixtures",
|
||||
"args": ["--bundle", "--outfile=out.esbuild.js", "await.ts"]
|
||||
},
|
||||
|
||||
// {
|
||||
// "type": "lldb",
|
||||
|
||||
26
.vscode/settings.json
vendored
26
.vscode/settings.json
vendored
@@ -8,29 +8,11 @@
|
||||
"search.useIgnoreFiles": true,
|
||||
"zig.buildOnSave": false,
|
||||
"[zig]": {
|
||||
"editor.defaultFormatter": "AugusteRame.zls-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[ts]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[js]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[jsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[tsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
"editor.defaultFormatter": "tiehuis.zig"
|
||||
},
|
||||
"lldb.verboseLogging": true,
|
||||
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
|
||||
"zig.testCmd": "make test ${file} ${filter} ${bin}",
|
||||
|
||||
"lldb.verboseLogging": false,
|
||||
"files.exclude": {
|
||||
"**/.git": true,
|
||||
"**/.svn": true,
|
||||
@@ -42,7 +24,6 @@
|
||||
"**/*.xcscheme": true,
|
||||
"**/*.pem": true,
|
||||
"**/*.xcodeproj": true,
|
||||
"packages/bun-types/*.d.ts": true,
|
||||
|
||||
"integration/snapshots": true,
|
||||
"integration/snapshots-no-hmr": true,
|
||||
@@ -50,11 +31,8 @@
|
||||
"src/deps/libarchive": true,
|
||||
"src/deps/mimalloc": true,
|
||||
"src/deps/s2n-tls": true,
|
||||
"src/deps/boringssl": true,
|
||||
"src/deps/openssl": true,
|
||||
"src/deps/uws": true,
|
||||
"src/deps/zlib": true,
|
||||
"src/deps/lol-html": true,
|
||||
"integration/snippets/package-json-exports/_node_modules_copy": true
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
|
||||
25
.vscode/tasks.json
vendored
25
.vscode/tasks.json
vendored
@@ -29,6 +29,31 @@
|
||||
"showReuseMessage": false,
|
||||
"clear": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "test",
|
||||
"type": "shell",
|
||||
"command": "zig",
|
||||
"args": [
|
||||
"test",
|
||||
"${file}",
|
||||
"--main-pkg-path",
|
||||
"${workspaceFolder}",
|
||||
"-femit-bin=${workspaceFolder}/zig-out/bin/test",
|
||||
";",
|
||||
"true"
|
||||
],
|
||||
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"panel": "new",
|
||||
"reveal": "always"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
447
Dockerfile
447
Dockerfile
@@ -1,447 +0,0 @@
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as lolhtml
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
|
||||
|
||||
RUN cd ${BUN_DIR} && \
|
||||
make lolhtml && rm -rf src/deps/lol-html Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as mimalloc
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
|
||||
RUN cd ${BUN_DIR} && \
|
||||
make mimalloc && rm -rf src/deps/mimalloc Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as zlib
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make zlib && rm -rf src/deps/zlib Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as libarchive
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make libarchive && rm -rf src/deps/libarchive Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as tinycc
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/tinycc ${BUN_DIR}/src/deps/tinycc
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make tinycc && rm -rf src/deps/tinycc Makefile
|
||||
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as libbacktrace
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/libbacktrace ${BUN_DIR}/src/deps/libbacktrace
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make libbacktrace && rm -rf src/deps/libbacktrace Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as boringssl
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN make boringssl && rm -rf src/deps/boringssl Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as base64
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/base64 ${BUN_DIR}/src/base64
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN make base64 && rm -rf src/base64 Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as uws
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/uws ${BUN_DIR}/src/deps/uws
|
||||
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
|
||||
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
|
||||
COPY src/deps/libuwsockets.cpp ${BUN_DIR}/src/deps/libuwsockets.cpp
|
||||
COPY src/deps/_libusockets.h ${BUN_DIR}/src/deps/_libusockets.h
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make uws && rm -rf src/deps/uws Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base:latest as picohttp
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/picohttpparser ${BUN_DIR}/src/deps/picohttpparser
|
||||
COPY src/deps/*.c ${BUN_DIR}/src/deps/
|
||||
COPY src/deps/*.h ${BUN_DIR}/src/deps/
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make picohttp
|
||||
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as identifier_cache
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/js_lexer/identifier_data.zig ${BUN_DIR}/src/js_lexer/identifier_data.zig
|
||||
COPY src/js_lexer/identifier_cache.zig ${BUN_DIR}/src/js_lexer/identifier_cache.zig
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make identifier-cache && rm -rf zig-cache Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as node_fallbacks
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/node-fallbacks ${BUN_DIR}/src/node-fallbacks
|
||||
RUN cd $BUN_DIR && \
|
||||
make node-fallbacks && rm -rf src/node-fallbacks/node_modules Makefile
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as prepare_release
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
COPY ./src ${BUN_DIR}/src
|
||||
COPY ./build.zig ${BUN_DIR}/build.zig
|
||||
COPY ./completions ${BUN_DIR}/completions
|
||||
COPY ./packages ${BUN_DIR}/packages
|
||||
COPY ./build-id ${BUN_DIR}/build-id
|
||||
COPY ./package.json ${BUN_DIR}/package.json
|
||||
COPY ./misctools ${BUN_DIR}/misctools
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/
|
||||
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
|
||||
|
||||
WORKDIR ${BUN_DIR}
|
||||
|
||||
|
||||
FROM prepare_release as build_release
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && rm -rf $HOME/.cache zig-cache && make \
|
||||
jsc-bindings-headers \
|
||||
api \
|
||||
analytics \
|
||||
bun_error \
|
||||
fallback_decoder && rm -rf $HOME/.cache zig-cache && \
|
||||
mkdir -p $BUN_RELEASE_DIR && \
|
||||
make jsc-bindings-mac -j10 && \
|
||||
make sqlite release copy-to-bun-release-dir && \
|
||||
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig $(BUN_DIR)/packages
|
||||
|
||||
FROM prepare_release as build_unit
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ENV PATH "$ZIG_PATH:$PATH"
|
||||
|
||||
CMD make jsc-bindings-headers \
|
||||
api \
|
||||
analytics \
|
||||
bun_error \
|
||||
fallback_decoder \
|
||||
jsc-bindings-mac -j10 && \
|
||||
make \
|
||||
run-all-unit-tests
|
||||
|
||||
FROM bunbunbunbun/bun-base-with-zig-and-webkit:latest as bun.devcontainer
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
|
||||
ENV PATH "$ZIG_PATH:$PATH"
|
||||
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
|
||||
ENV LIB_ICU_PATH ${GITHUB_WORKSPACE}/icu/source/lib
|
||||
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
|
||||
ENV PATH "${GITHUB_WORKSPACE}/packages/bun-linux-x64:${GITHUB_WORKSPACE}/packages/bun-linux-aarch64:${GITHUB_WORKSPACE}/packages/debug-bun-linux-x64:${GITHUB_WORKSPACE}/packages/debug-bun-linux-aarch64:$PATH"
|
||||
ENV PATH "/home/ubuntu/zls/zig-out/bin:$PATH"
|
||||
|
||||
ENV BUN_INSTALL /home/ubuntu/.bun
|
||||
ENV XDG_CONFIG_HOME /home/ubuntu/.config
|
||||
|
||||
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-13 90
|
||||
|
||||
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
|
||||
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
COPY ".devcontainer/scripts/" /scripts/
|
||||
COPY ".devcontainer/scripts/getting-started.sh" $GITHUB_WORKSPACE/getting-started.sh
|
||||
RUN mkdir -p /home/ubuntu/.bun /home/ubuntu/.config $GITHUB_WORKSPACE/bun && \
|
||||
bash /scripts/common-debian.sh && \
|
||||
bash /scripts/github.sh && \
|
||||
bash /scripts/nice.sh && \
|
||||
bash /scripts/zig-env.sh
|
||||
COPY .devcontainer/zls.json /home/ubuntu/.config/zls.json
|
||||
|
||||
FROM ubuntu:20.04 as release_with_debug_info
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
|
||||
ENV BUN_INSTALL /opt/bun
|
||||
ENV PATH "/opt/bun/bin:$PATH"
|
||||
ARG BUILDARCH=amd64
|
||||
LABEL org.opencontainers.image.title="bun ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /opt/bun/bin/bun
|
||||
COPY --from=build_release ${BUN_RELEASE_DIR}/bun-profile /opt/bun/bin/bun-profile
|
||||
|
||||
WORKDIR /opt/bun
|
||||
|
||||
ENTRYPOINT [ "/opt/bun/bin/bun" ]
|
||||
|
||||
FROM ubuntu:20.04 as release
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
COPY .devcontainer/limits.conf /etc/security/limits.conf
|
||||
|
||||
ENV BUN_INSTALL /opt/bun
|
||||
ENV PATH "/opt/bun/bin:$PATH"
|
||||
ARG BUILDARCH=amd64
|
||||
LABEL org.opencontainers.image.title="bun ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /opt/bun/bin/bun
|
||||
WORKDIR /opt/bun
|
||||
|
||||
ENTRYPOINT [ "/opt/bun/bin/bun" ]
|
||||
|
||||
|
||||
FROM bunbunbunbun/bun-test-base as test_base
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
ARG BUILDARCH=amd64
|
||||
RUN groupadd -r chromium && useradd -d ${BUN_DIR} -M -r -g chromium -G audio,video chromium \
|
||||
&& mkdir -p /home/chromium/Downloads && chown -R chromium:chromium /home/chromium
|
||||
|
||||
USER chromium
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ENV NPM_CLIENT bun
|
||||
ENV PATH "${BUN_DIR}/packages/bun-linux-x64:${BUN_DIR}/packages/bun-linux-aarch64:$PATH"
|
||||
ENV CI 1
|
||||
ENV BROWSER_EXECUTABLE /usr/bin/chromium
|
||||
|
||||
COPY ./integration ${BUN_DIR}/integration
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY package.json ${BUN_DIR}/package.json
|
||||
COPY .docker/run-test.sh ${BUN_DIR}/run-test.sh
|
||||
COPY ./bun.lockb ${BUN_DIR}/bun.lockb
|
||||
|
||||
# # We don't want to worry about architecture differences in this image
|
||||
COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-aarch64/bun
|
||||
COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-x64/bun
|
||||
|
||||
USER root
|
||||
RUN chgrp -R chromium ${BUN_DIR} && chmod g+rwx ${BUN_DIR} && chown -R chromium:chromium ${BUN_DIR}
|
||||
USER chromium
|
||||
|
||||
CMD [ "bash", "run-test.sh" ]
|
||||
|
||||
FROM release
|
||||
52
Dockerfile-zig
Normal file
52
Dockerfile-zig
Normal file
@@ -0,0 +1,52 @@
|
||||
FROM ubuntu:latest
|
||||
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common
|
||||
RUN curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add -
|
||||
|
||||
RUN wget https://apt.llvm.org/llvm.sh --no-check-certificate
|
||||
RUN chmod +x llvm.sh
|
||||
RUN ./llvm.sh 12
|
||||
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg2 \
|
||||
software-properties-common \
|
||||
cmake \
|
||||
build-essential \
|
||||
git \
|
||||
libssl-dev \
|
||||
ruby \
|
||||
liblld-12-dev \
|
||||
libclang-12-dev \
|
||||
nodejs \
|
||||
gcc \
|
||||
g++ \
|
||||
npm \
|
||||
clang-12 \
|
||||
clang-format-12 \
|
||||
libc++-12-dev \
|
||||
libc++abi-12-dev \
|
||||
lld-12 \
|
||||
libicu-dev
|
||||
|
||||
RUN update-alternatives --install /usr/bin/ld ld /usr/bin/lld-12 90 && \
|
||||
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-12 90 && \
|
||||
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-12 90 && \
|
||||
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-12 90
|
||||
|
||||
|
||||
ENV CC=clang-12
|
||||
ENV CXX=clang++-12
|
||||
|
||||
# Compile zig
|
||||
RUN mkdir -p /home/ubuntu/zig; cd /home/ubuntu; git clone https://github.com/jarred-sumner/zig.git; cd /home/ubuntu/zig && git checkout jarred/zig-sloppy-with-small-structs && cmake . -DCMAKE_BUILD_TYPE=Release && make -j$(nproc)
|
||||
|
||||
ENV PATH="/home/ubuntu/zig:$PATH"
|
||||
|
||||
RUN npm install -g esbuild
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
147
Dockerfile.base
147
Dockerfile.base
@@ -1,147 +0,0 @@
|
||||
FROM ubuntu:20.04 as bun-base-with-args
|
||||
|
||||
FROM bun-base-with-args as bun-base
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
|
||||
WORKDIR ${GITHUB_WORKSPACE}
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install --no-install-recommends -y wget gnupg2 curl lsb-release wget software-properties-common && \
|
||||
add-apt-repository ppa:longsleep/golang-backports && \
|
||||
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
|
||||
chmod +x llvm.sh && \
|
||||
./llvm.sh 13 && \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg2 \
|
||||
software-properties-common \
|
||||
cmake \
|
||||
build-essential \
|
||||
git \
|
||||
libssl-dev \
|
||||
ruby \
|
||||
liblld-13-dev \
|
||||
libclang-13-dev \
|
||||
nodejs \
|
||||
gcc \
|
||||
g++ \
|
||||
npm \
|
||||
clang-13 \
|
||||
clang-format-13 \
|
||||
libc++-13-dev \
|
||||
libc++abi-13-dev \
|
||||
lld-13 \
|
||||
libicu-dev \
|
||||
wget \
|
||||
rustc \
|
||||
cargo \
|
||||
unzip \
|
||||
tar \
|
||||
golang-go ninja-build pkg-config automake autoconf libtool curl && \
|
||||
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-13 90 && \
|
||||
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-13 90 && \
|
||||
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-13 90 && \
|
||||
npm install -g esbuild
|
||||
|
||||
ENV CC=clang-13
|
||||
ENV CXX=clang++-13
|
||||
|
||||
|
||||
ARG BUILDARCH=amd64
|
||||
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
ENV WEBKIT_OUT_DIR ${WEBKIT_DIR}
|
||||
|
||||
ENV JSC_BASE_DIR $WEBKIT_OUT_DIR
|
||||
ENV LIB_ICU_PATH ${GITHUB_WORKSPACE}/icu/source/lib
|
||||
ENV BUN_RELEASE_DIR ${BUN_RELEASE_DIR}
|
||||
ENV BUN_DEPS_OUT_DIR ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
LABEL org.opencontainers.image.title="bun base image ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
|
||||
|
||||
FROM bun-base as bun-base-with-zig-and-webkit
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
|
||||
# Directory extracts to "bun-webkit"
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
ARG BUILDARCH=amd64
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o zig-linux-$BUILDARCH.zip -L https://github.com/Jarred-Sumner/zig/releases/download/mar4/zig-linux-$BUILDARCH.zip && \
|
||||
unzip -q zig-linux-$BUILDARCH.zip && \
|
||||
rm zig-linux-$BUILDARCH.zip;
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/Jarred-Sumner/WebKit/releases/download/May8/bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
|
||||
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
|
||||
|
||||
RUN cd $GITHUB_WORKSPACE && \
|
||||
curl -o icu4c-66_1-src.tgz -L https://github.com/unicode-org/icu/releases/download/release-66-1/icu4c-66_1-src.tgz && \
|
||||
tar -xzf icu4c-66_1-src.tgz && \
|
||||
rm icu4c-66_1-src.tgz && \
|
||||
cd icu/source && \
|
||||
./configure --enable-static --disable-shared && \
|
||||
make -j$(nproc)
|
||||
|
||||
ENV ZIG "${ZIG_PATH}/zig"
|
||||
|
||||
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
|
||||
LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
|
||||
|
||||
FROM debian:bullseye-slim as bun-test-base
|
||||
# Original creator:
|
||||
# LABEL maintainer "Jessie Frazelle <jess@linux.com>"
|
||||
|
||||
# Install Chromium
|
||||
# Yes, including the Google API Keys sucks but even debian does the same: https://packages.debian.org/stretch/amd64/chromium/filelist
|
||||
RUN apt-get update && apt-get install -y \
|
||||
chromium \
|
||||
chromium-l10n \
|
||||
fonts-liberation \
|
||||
fonts-roboto \
|
||||
hicolor-icon-theme \
|
||||
libcanberra-gtk-module \
|
||||
libexif-dev \
|
||||
libgl1-mesa-dri \
|
||||
libgl1-mesa-glx \
|
||||
libpangox-1.0-0 \
|
||||
libv4l-0 \
|
||||
fonts-symbola \
|
||||
bash \
|
||||
make \
|
||||
psmisc \
|
||||
curl \
|
||||
--no-install-recommends \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& mkdir -p /etc/chromium.d/ \
|
||||
&& /bin/echo -e 'export GOOGLE_API_KEY="AIzaSyCkfPOPZXDKNn8hhgu3JrA62wIgC93d44k"\nexport GOOGLE_DEFAULT_CLIENT_ID="811574891467.apps.googleusercontent.com"\nexport GOOGLE_DEFAULT_CLIENT_SECRET="kdloedMFGdGla2P1zacGjAQh"' > /etc/chromium.d/googleapikeys && \
|
||||
curl -L https://deb.nodesource.com/setup_16.x | bash - && \
|
||||
apt-get update && \
|
||||
apt-get install -y nodejs npm
|
||||
|
||||
139
Dockerfile.musl
139
Dockerfile.musl
@@ -1,139 +0,0 @@
|
||||
|
||||
# This doesn't work
|
||||
# Specifically: there are a number of crashes and segfaults when using musl
|
||||
# The cause is likely related to differences in pthreads implementations
|
||||
# It is not just the stack size thing. It's something more complicated and importantly
|
||||
# There was no meaningful file size difference between musl and glibc
|
||||
|
||||
|
||||
# ARG BUILDARCH=aarch64
|
||||
# ARG zig_base_image=ghcr.io/jarred-sumner/zig-linux-musl-${BUILDARCH}
|
||||
# ARG webkit_base_image=ghcr.io/jarred-sumner/bun-webkit-musl-${BUILDARCH}
|
||||
# FROM ${zig_base_image}:latest AS zig
|
||||
# FROM ${webkit_base_image}:latest AS webkit
|
||||
|
||||
# FROM zig as bun_base
|
||||
|
||||
# COPY --from=webkit /webkit /webkit
|
||||
|
||||
# ENV PATH "/zig/bin:$PATH"
|
||||
# ENV JSC_BASE_DIR=/webkit
|
||||
# ENV LIB_ICU_PATH=/webkit/lib
|
||||
# ENV BUN_DEPS_OUT_DIR /bun-deps
|
||||
# ENV STATIC_MUSL_FLAG=-static
|
||||
# ENV MIMALLOC_OVERRIDE_FLAG="-DMI_OVERRIDE=OFF"
|
||||
|
||||
# RUN apk add --no-cache nodejs npm go libtool autoconf pkgconfig automake ninja
|
||||
# RUN mkdir -p $BUN_DEPS_OUT_DIR;
|
||||
|
||||
# WORKDIR /bun
|
||||
# COPY Makefile /bun/Makefile
|
||||
|
||||
# FROM bun_base as mimalloc
|
||||
|
||||
# COPY src/deps/mimalloc /bun/src/deps/mimalloc
|
||||
|
||||
# RUN make mimalloc;
|
||||
|
||||
# FROM bun_base as zlib
|
||||
|
||||
# COPY src/deps/zlib /bun/src/deps/zlib
|
||||
|
||||
# RUN make zlib;
|
||||
|
||||
# FROM bun_base as libarchive
|
||||
|
||||
# COPY src/deps/libarchive /bun/src/deps/libarchive
|
||||
|
||||
# RUN make libarchive;
|
||||
|
||||
# FROM bun_base as boringssl
|
||||
|
||||
# COPY src/deps/boringssl /bun/src/deps/boringssl
|
||||
|
||||
# RUN make boringssl;
|
||||
|
||||
# FROM bun_base as picohttp
|
||||
|
||||
# COPY src/deps/picohttpparser /bun/src/deps/picohttpparser
|
||||
# COPY src/deps/*.c /bun/src/deps/
|
||||
# COPY src/deps/*.h /bun/src/deps/
|
||||
|
||||
# RUN make picohttp
|
||||
|
||||
# FROM bun_base as identifier_cache
|
||||
|
||||
# COPY src/js_lexer/identifier_data.zig /bun/src/js_lexer/identifier_data.zig
|
||||
# COPY src/js_lexer/identifier_cache.zig /bun/src/js_lexer/identifier_cache.zig
|
||||
|
||||
# RUN make identifier-cache
|
||||
|
||||
# FROM bun_base as node_fallbacks
|
||||
|
||||
# COPY src/node-fallbacks /bun/src/node-fallbacks
|
||||
# RUN make node-fallbacks
|
||||
|
||||
# FROM bun_base as prebuild
|
||||
|
||||
# WORKDIR /bun
|
||||
|
||||
# COPY ./src /bun/src
|
||||
# COPY ./build.zig /bun/build.zig
|
||||
# COPY ./completions /bun/completions
|
||||
# COPY ./packages /bun/packages
|
||||
# COPY ./build-id /bun/build-id
|
||||
# COPY ./package.json /bun/package.json
|
||||
# COPY ./misctools /bun/misctools
|
||||
|
||||
# COPY --from=mimalloc /bun-deps/*.o /bun-deps
|
||||
# COPY --from=libarchive /bun-deps/*.a /bun-deps
|
||||
# COPY --from=picohttp /bun-deps/*.o /bun-deps
|
||||
# COPY --from=boringssl /bun-deps/*.a /bun-deps
|
||||
# COPY --from=zlib /bun-deps/*.a /bun-deps
|
||||
# COPY --from=node_fallbacks /bun/src/node-fallbacks /bun/src/node-fallbacks
|
||||
# COPY --from=identifier_cache /bun/src/js_lexer/*.blob /bun/src/js_lexer/
|
||||
|
||||
# ENV ICU_FLAGS="-I/webkit/include/wtf $ICU_FLAGS"
|
||||
|
||||
# RUN apk add --no-cache chromium && npm install -g esbuild && make \
|
||||
# jsc-bindings-headers \
|
||||
# api \
|
||||
# analytics \
|
||||
# bun_error \
|
||||
# fallback_decoder
|
||||
|
||||
|
||||
|
||||
# FROM prebuild as release
|
||||
|
||||
# ENV BUN_RELEASE_DIR /opt/bun
|
||||
|
||||
# ENV LIB_ICU_PATH /usr/lib
|
||||
|
||||
# RUN apk add icu-static icu-dev && mkdir -p $BUN_RELEASE_DIR; make release \
|
||||
# copy-to-bun-release-dir
|
||||
|
||||
# FROM alpine:3.15 as bun
|
||||
|
||||
# COPY --from=release /opt/bun/bun /opt/bun/bin/bun
|
||||
# ENV BUN_INSTALL /opt/bun
|
||||
# ENV PATH /opt/bun/bin:$PATH
|
||||
|
||||
# LABEL org.opencontainers.image.title="bun - Linux ${BUILDARCH} (musl)"
|
||||
# LABEL org.opencontainers.image.source=https://github.com/jarred-sumner/bun
|
||||
|
||||
# FROM release as test
|
||||
|
||||
# ENV PATH /opt/bun/bin:$PATH
|
||||
# ENV PATH /bun/packages/bun-linux-aarch64:/bun/packages/bun-linux-x64:$PATH
|
||||
# ENV BUN_INSTALL /opt/bun
|
||||
|
||||
# WORKDIR /bun
|
||||
|
||||
# COPY ./integration /bun/integration
|
||||
# COPY ./integration/snippets/package-json-exports/_node_modules_copy /bun/integration/snippets/package-json-exports/_node_modules_copy
|
||||
# CMD [ "bash", "-c", "npm install && cd /bun/integration/snippets && npm install && cd /bun && make copy-test-node-modules test-all"]
|
||||
|
||||
|
||||
# # FROM bun
|
||||
|
||||
@@ -1,419 +0,0 @@
|
||||
import {
|
||||
viewSource,
|
||||
dlopen,
|
||||
CString,
|
||||
ptr,
|
||||
toBuffer,
|
||||
toArrayBuffer,
|
||||
FFIType,
|
||||
callback,
|
||||
} from "bun:ffi";
|
||||
import { bench, group, run } from "mitata";
|
||||
|
||||
const types = {
|
||||
returns_true: {
|
||||
returns: "bool",
|
||||
args: [],
|
||||
},
|
||||
returns_false: {
|
||||
returns: "bool",
|
||||
args: [],
|
||||
},
|
||||
returns_42_char: {
|
||||
returns: "char",
|
||||
args: [],
|
||||
},
|
||||
// returns_42_float: {
|
||||
// returns: "float",
|
||||
// args: [],
|
||||
// },
|
||||
// returns_42_double: {
|
||||
// returns: "double",
|
||||
// args: [],
|
||||
// },
|
||||
returns_42_uint8_t: {
|
||||
returns: "uint8_t",
|
||||
args: [],
|
||||
},
|
||||
returns_neg_42_int8_t: {
|
||||
returns: "int8_t",
|
||||
args: [],
|
||||
},
|
||||
returns_42_uint16_t: {
|
||||
returns: "uint16_t",
|
||||
args: [],
|
||||
},
|
||||
returns_42_uint32_t: {
|
||||
returns: "uint32_t",
|
||||
args: [],
|
||||
},
|
||||
// // returns_42_uint64_t: {
|
||||
// // returns: "uint64_t",
|
||||
// // args: [],
|
||||
// // },
|
||||
returns_neg_42_int16_t: {
|
||||
returns: "int16_t",
|
||||
args: [],
|
||||
},
|
||||
returns_neg_42_int32_t: {
|
||||
returns: "int32_t",
|
||||
args: [],
|
||||
},
|
||||
// returns_neg_42_int64_t: {
|
||||
// returns: "int64_t",
|
||||
// args: [],
|
||||
// },
|
||||
|
||||
identity_char: {
|
||||
returns: "char",
|
||||
args: ["char"],
|
||||
},
|
||||
// identity_float: {
|
||||
// returns: "float",
|
||||
// args: ["float"],
|
||||
// },
|
||||
identity_bool: {
|
||||
returns: "bool",
|
||||
args: ["bool"],
|
||||
},
|
||||
// identity_double: {
|
||||
// returns: "double",
|
||||
// args: ["double"],
|
||||
// },
|
||||
identity_int8_t: {
|
||||
returns: "int8_t",
|
||||
args: ["int8_t"],
|
||||
},
|
||||
identity_int16_t: {
|
||||
returns: "int16_t",
|
||||
args: ["int16_t"],
|
||||
},
|
||||
identity_int32_t: {
|
||||
returns: "int32_t",
|
||||
args: ["int32_t"],
|
||||
},
|
||||
// identity_int64_t: {
|
||||
// returns: "int64_t",
|
||||
// args: ["int64_t"],
|
||||
// },
|
||||
identity_uint8_t: {
|
||||
returns: "uint8_t",
|
||||
args: ["uint8_t"],
|
||||
},
|
||||
identity_uint16_t: {
|
||||
returns: "uint16_t",
|
||||
args: ["uint16_t"],
|
||||
},
|
||||
identity_uint32_t: {
|
||||
returns: "uint32_t",
|
||||
args: ["uint32_t"],
|
||||
},
|
||||
// identity_uint64_t: {
|
||||
// returns: "uint64_t",
|
||||
// args: ["uint64_t"],
|
||||
// },
|
||||
|
||||
add_char: {
|
||||
returns: "char",
|
||||
args: ["char", "char"],
|
||||
},
|
||||
add_float: {
|
||||
returns: "float",
|
||||
args: ["float", "float"],
|
||||
},
|
||||
add_double: {
|
||||
returns: "double",
|
||||
args: ["double", "double"],
|
||||
},
|
||||
add_int8_t: {
|
||||
returns: "int8_t",
|
||||
args: ["int8_t", "int8_t"],
|
||||
},
|
||||
add_int16_t: {
|
||||
returns: "int16_t",
|
||||
args: ["int16_t", "int16_t"],
|
||||
},
|
||||
add_int32_t: {
|
||||
returns: "int32_t",
|
||||
args: ["int32_t", "int32_t"],
|
||||
},
|
||||
// add_int64_t: {
|
||||
// returns: "int64_t",
|
||||
// args: ["int64_t", "int64_t"],
|
||||
// },
|
||||
add_uint8_t: {
|
||||
returns: "uint8_t",
|
||||
args: ["uint8_t", "uint8_t"],
|
||||
},
|
||||
add_uint16_t: {
|
||||
returns: "uint16_t",
|
||||
args: ["uint16_t", "uint16_t"],
|
||||
},
|
||||
add_uint32_t: {
|
||||
returns: "uint32_t",
|
||||
args: ["uint32_t", "uint32_t"],
|
||||
},
|
||||
|
||||
does_pointer_equal_42_as_int32_t: {
|
||||
returns: "bool",
|
||||
args: ["ptr"],
|
||||
},
|
||||
|
||||
ptr_should_point_to_42_as_int32_t: {
|
||||
returns: "ptr",
|
||||
args: [],
|
||||
},
|
||||
identity_ptr: {
|
||||
returns: "ptr",
|
||||
args: ["ptr"],
|
||||
},
|
||||
// add_uint64_t: {
|
||||
// returns: "uint64_t",
|
||||
// args: ["uint64_t", "uint64_t"],
|
||||
// },
|
||||
|
||||
cb_identity_true: {
|
||||
returns: "bool",
|
||||
args: ["ptr"],
|
||||
},
|
||||
cb_identity_false: {
|
||||
returns: "bool",
|
||||
args: ["ptr"],
|
||||
},
|
||||
cb_identity_42_char: {
|
||||
returns: "char",
|
||||
args: ["ptr"],
|
||||
},
|
||||
// cb_identity_42_float: {
|
||||
// returns: "float",
|
||||
// args: ["ptr"],
|
||||
// },
|
||||
// cb_identity_42_double: {
|
||||
// returns: "double",
|
||||
// args: ["ptr"],
|
||||
// },
|
||||
cb_identity_42_uint8_t: {
|
||||
returns: "uint8_t",
|
||||
args: ["ptr"],
|
||||
},
|
||||
cb_identity_neg_42_int8_t: {
|
||||
returns: "int8_t",
|
||||
args: ["ptr"],
|
||||
},
|
||||
cb_identity_42_uint16_t: {
|
||||
returns: "uint16_t",
|
||||
args: ["ptr"],
|
||||
},
|
||||
cb_identity_42_uint32_t: {
|
||||
returns: "uint32_t",
|
||||
args: ["ptr"],
|
||||
},
|
||||
// cb_identity_42_uint64_t: {
|
||||
// returns: "uint64_t",
|
||||
// args: ["ptr"],
|
||||
// },
|
||||
cb_identity_neg_42_int16_t: {
|
||||
returns: "int16_t",
|
||||
args: ["ptr"],
|
||||
},
|
||||
cb_identity_neg_42_int32_t: {
|
||||
returns: "int32_t",
|
||||
args: ["ptr"],
|
||||
},
|
||||
// cb_identity_neg_42_int64_t: {
|
||||
// returns: "int64_t",
|
||||
// args: ["ptr"],
|
||||
// },
|
||||
|
||||
return_a_function_ptr_to_function_that_returns_true: {
|
||||
returns: "ptr",
|
||||
args: [],
|
||||
},
|
||||
};
|
||||
|
||||
const {
|
||||
symbols: {
|
||||
returns_true,
|
||||
returns_false,
|
||||
return_a_function_ptr_to_function_that_returns_true,
|
||||
returns_42_char,
|
||||
returns_42_float,
|
||||
returns_42_double,
|
||||
returns_42_uint8_t,
|
||||
returns_neg_42_int8_t,
|
||||
returns_42_uint16_t,
|
||||
returns_42_uint32_t,
|
||||
returns_42_uint64_t,
|
||||
returns_neg_42_int16_t,
|
||||
returns_neg_42_int32_t,
|
||||
returns_neg_42_int64_t,
|
||||
identity_char,
|
||||
identity_float,
|
||||
identity_bool,
|
||||
identity_double,
|
||||
identity_int8_t,
|
||||
identity_int16_t,
|
||||
identity_int32_t,
|
||||
identity_int64_t,
|
||||
identity_uint8_t,
|
||||
identity_uint16_t,
|
||||
identity_uint32_t,
|
||||
identity_uint64_t,
|
||||
add_char,
|
||||
add_float,
|
||||
add_double,
|
||||
add_int8_t,
|
||||
add_int16_t,
|
||||
add_int32_t,
|
||||
add_int64_t,
|
||||
add_uint8_t,
|
||||
add_uint16_t,
|
||||
identity_ptr,
|
||||
add_uint32_t,
|
||||
add_uint64_t,
|
||||
does_pointer_equal_42_as_int32_t,
|
||||
ptr_should_point_to_42_as_int32_t,
|
||||
cb_identity_true,
|
||||
cb_identity_false,
|
||||
cb_identity_42_char,
|
||||
cb_identity_42_float,
|
||||
cb_identity_42_double,
|
||||
cb_identity_42_uint8_t,
|
||||
cb_identity_neg_42_int8_t,
|
||||
cb_identity_42_uint16_t,
|
||||
cb_identity_42_uint32_t,
|
||||
cb_identity_42_uint64_t,
|
||||
cb_identity_neg_42_int16_t,
|
||||
cb_identity_neg_42_int32_t,
|
||||
cb_identity_neg_42_int64_t,
|
||||
},
|
||||
close,
|
||||
} = dlopen("/tmp/bun-ffi-test.dylib", types);
|
||||
|
||||
group("add_char", () => {
|
||||
bench("add_char (raw)", () => raw_add_char(1, 1));
|
||||
bench("add_char", () => add_char(1, 1));
|
||||
});
|
||||
group("add_int16_t", () => {
|
||||
bench("add_int16_t (raw)", () => raw_add_int16_t(1, 1));
|
||||
bench("add_int16_t", () => add_int16_t(1, 1));
|
||||
});
|
||||
group("add_int32_t", () => {
|
||||
bench("add_int32_t (raw)", () => raw_add_int32_t(1, 1));
|
||||
bench("add_int32_t", () => add_int32_t(1, 1));
|
||||
});
|
||||
group("add_int8_t", () => {
|
||||
bench("add_int8_t (raw)", () => raw_add_int8_t(1, 1));
|
||||
bench("add_int8_t", () => add_int8_t(1, 1));
|
||||
});
|
||||
group("add_uint16_t", () => {
|
||||
bench("add_uint16_t (raw)", () => raw_add_uint16_t(1, 1));
|
||||
bench("add_uint16_t", () => add_uint16_t(1, 1));
|
||||
});
|
||||
group("add_uint32_t", () => {
|
||||
bench("add_uint32_t (raw)", () => raw_add_uint32_t(1, 1));
|
||||
bench("add_uint32_t", () => add_uint32_t(1, 1));
|
||||
});
|
||||
group("add_uint8_t", () => {
|
||||
bench("add_uint8_t (raw)", () => raw_add_uint8_t(1, 1));
|
||||
bench("add_uint8_t", () => add_uint8_t(1, 1));
|
||||
});
|
||||
group("identity_bool", () => {
|
||||
bench("identity_bool (raw)", () => raw_identity_bool(false));
|
||||
bench("identity_bool", () => identity_bool(true));
|
||||
});
|
||||
group("identity_char", () => {
|
||||
bench("identity_char (raw)", () => raw_identity_char(10));
|
||||
bench("identity_char", () => identity_char(10));
|
||||
});
|
||||
group("identity_int16_t", () => {
|
||||
bench("identity_int16_t (raw)", () => raw_identity_int16_t(10));
|
||||
bench("identity_int16_t", () => identity_int16_t(10));
|
||||
});
|
||||
group("identity_int32_t", () => {
|
||||
bench("identity_int32_t (raw)", () => raw_identity_int32_t(10));
|
||||
bench("identity_int32_t", () => identity_int32_t(10));
|
||||
});
|
||||
group("identity_int8_t", () => {
|
||||
bench("identity_int8_t (raw)", () => raw_identity_int8_t(10));
|
||||
bench("identity_int8_t", () => identity_int8_t(10));
|
||||
});
|
||||
group("identity_uint16_t", () => {
|
||||
bench("identity_uint16_t (raw)", () => raw_identity_uint16_t(10));
|
||||
bench("identity_uint16_t", () => identity_uint16_t(10));
|
||||
});
|
||||
group("identity_uint32_t", () => {
|
||||
bench("identity_uint32_t (raw)", () => raw_identity_uint32_t(10));
|
||||
bench("identity_uint32_t", () => identity_uint32_t(10));
|
||||
});
|
||||
group("identity_uint8_t", () => {
|
||||
bench("identity_uint8_t (raw)", () => raw_identity_uint8_t(10));
|
||||
bench("identity_uint8_t", () => identity_uint8_t(10));
|
||||
});
|
||||
group("returns_42_char", () => {
|
||||
bench("returns_42_char (raw)", () => raw_returns_42_char());
|
||||
bench("returns_42_char", () => returns_42_char());
|
||||
});
|
||||
group("returns_42_uint16_t", () => {
|
||||
bench("returns_42_uint16_t (raw)", () => raw_returns_42_uint16_t());
|
||||
bench("returns_42_uint16_t", () => returns_42_uint16_t());
|
||||
});
|
||||
group("returns_42_uint32_t", () => {
|
||||
bench("returns_42_uint32_t (raw)", () => raw_returns_42_uint32_t());
|
||||
bench("returns_42_uint32_t", () => returns_42_uint32_t());
|
||||
});
|
||||
group("returns_42_uint8_t", () => {
|
||||
bench("returns_42_uint8_t (raw)", () => raw_returns_42_uint8_t());
|
||||
bench("returns_42_uint8_t", () => returns_42_uint8_t());
|
||||
});
|
||||
group("returns_false", () => {
|
||||
bench("returns_false (raw)", () => raw_returns_false());
|
||||
bench("returns_false", () => returns_false());
|
||||
});
|
||||
group("returns_neg_42_int16_t", () => {
|
||||
bench("returns_neg_42_int16_t (raw)", () => raw_returns_neg_42_int16_t());
|
||||
bench("returns_neg_42_int16_t", () => returns_neg_42_int16_t());
|
||||
});
|
||||
group("returns_neg_42_int32_t", () => {
|
||||
bench("returns_neg_42_int32_t (raw)", () => raw_returns_neg_42_int32_t());
|
||||
bench("returns_neg_42_int32_t", () => returns_neg_42_int32_t());
|
||||
});
|
||||
group("returns_neg_42_int8_t", () => {
|
||||
bench("returns_neg_42_int8_t (raw)", () => raw_returns_neg_42_int8_t());
|
||||
bench("returns_neg_42_int8_t", () => returns_neg_42_int8_t());
|
||||
});
|
||||
group("returns_true", () => {
|
||||
bench("returns_true (raw)", () => raw_returns_true());
|
||||
bench("returns_true", () => returns_true());
|
||||
});
|
||||
|
||||
var raw_returns_true = returns_true.native ?? returns_true;
|
||||
var raw_returns_false = returns_false.native ?? returns_false;
|
||||
var raw_returns_42_char = returns_42_char.native ?? returns_42_char;
|
||||
var raw_returns_42_uint8_t = returns_42_uint8_t.native ?? returns_42_uint8_t;
|
||||
var raw_returns_neg_42_int8_t =
|
||||
returns_neg_42_int8_t.native ?? returns_neg_42_int8_t;
|
||||
var raw_returns_42_uint16_t = returns_42_uint16_t.native ?? returns_42_uint16_t;
|
||||
var raw_returns_42_uint32_t = returns_42_uint32_t.native ?? returns_42_uint32_t;
|
||||
var raw_returns_neg_42_int16_t =
|
||||
returns_neg_42_int16_t.native ?? returns_neg_42_int16_t;
|
||||
var raw_returns_neg_42_int32_t =
|
||||
returns_neg_42_int32_t.native ?? returns_neg_42_int32_t;
|
||||
var raw_identity_char = identity_char.native ?? identity_char;
|
||||
var raw_identity_bool = identity_bool.native ?? identity_bool;
|
||||
var raw_identity_bool = identity_bool.native ?? identity_bool;
|
||||
var raw_identity_int8_t = identity_int8_t.native ?? identity_int8_t;
|
||||
var raw_identity_int16_t = identity_int16_t.native ?? identity_int16_t;
|
||||
var raw_identity_int32_t = identity_int32_t.native ?? identity_int32_t;
|
||||
var raw_identity_uint8_t = identity_uint8_t.native ?? identity_uint8_t;
|
||||
var raw_identity_uint16_t = identity_uint16_t.native ?? identity_uint16_t;
|
||||
var raw_identity_uint32_t = identity_uint32_t.native ?? identity_uint32_t;
|
||||
var raw_add_char = add_char.native ?? add_char;
|
||||
var raw_add_int8_t = add_int8_t.native ?? add_int8_t;
|
||||
var raw_add_int16_t = add_int16_t.native ?? add_int16_t;
|
||||
var raw_add_int32_t = add_int32_t.native ?? add_int32_t;
|
||||
var raw_add_uint8_t = add_uint8_t.native ?? add_uint8_t;
|
||||
var raw_add_uint16_t = add_uint16_t.native ?? add_uint16_t;
|
||||
var raw_add_uint32_t = add_uint32_t.native ?? add_uint32_t;
|
||||
|
||||
run({ collect: false, percentiles: true });
|
||||
@@ -1,5 +0,0 @@
|
||||
// clang -O3 -shared -mtune=native ./noop.c -o noop.dylib
|
||||
|
||||
void noop();
|
||||
|
||||
void noop() {}
|
||||
Binary file not shown.
@@ -1,15 +0,0 @@
|
||||
import { dlopen } from "bun:ffi";
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
const {
|
||||
symbols: { noop },
|
||||
} = dlopen("./noop.dylib", {
|
||||
noop: {
|
||||
args: [],
|
||||
returns: "void",
|
||||
},
|
||||
});
|
||||
bench("noop", () => {
|
||||
noop();
|
||||
});
|
||||
run({ collect: false, percentiles: true });
|
||||
1
bench/ffi/plus100/.gitignore
vendored
1
bench/ffi/plus100/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
./napi-plus100
|
||||
@@ -1,37 +0,0 @@
|
||||
## FFI overhead comparison
|
||||
|
||||
This compares the cost of simple function calls going from JavaScript to native code and back in:
|
||||
|
||||
- Bun v0.0.79
|
||||
- napi.rs (Node v17.7.1)
|
||||
- Deno v1.21.1
|
||||
|
||||
To set up:
|
||||
|
||||
```bash
|
||||
bun setup
|
||||
```
|
||||
|
||||
To run the benchmark:
|
||||
|
||||
```bash
|
||||
bun bench
|
||||
```
|
||||
|
||||
**add 100 to a number**:
|
||||
|
||||
| Overhead | Using | Version | Platform |
|
||||
| -------- | ------- | ------- | --------------- |
|
||||
| 7ns | bun:ffi | 0.0.79 | macOS (aarch64) |
|
||||
| 18ns | napi.rs | 17.7.1 | macOS (aarch64) |
|
||||
| 580ns | Deno | 1.21.1 | macOS (aarch64) |
|
||||
|
||||
**function that does nothing**:
|
||||
|
||||
| Overhead | Using | Version | Platform |
|
||||
| -------- | ------- | ------- | --------------- |
|
||||
| 3ns | bun:ffi | 0.0.79 | macOS (aarch64) |
|
||||
| 15ns | napi.rs | 17.7.1 | macOS (aarch64) |
|
||||
| 431ns | Deno | 1.21.1 | macOS (aarch64) |
|
||||
|
||||
The native [functions](./plus100.c) called in Deno & Bun are the same. The function called with napi.rs is based on napi's official [package-template](https://github.com/napi-rs/package-template) in https://github.com/Jarred-Sumner/napi-plus100
|
||||
@@ -1,7 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
rm -rf plus100-napi
|
||||
git clone https://github.com/Jarred-Sumner/napi-plus100 plus100-napi --depth=1
|
||||
cd plus100-napi
|
||||
npm install
|
||||
npm run build
|
||||
Binary file not shown.
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "plus100",
|
||||
"scripts": {
|
||||
"setup": "bun run napi-setup && bun run compile",
|
||||
"bench-deno": "deno run --allow-ffi --unstable -A plus100.deno.js",
|
||||
"napi-setup": "bash download-napi-plus100.sh",
|
||||
"bench-napi": "node plus100.napi.mjs",
|
||||
"bench-bun": "bun ./plus100.bun.js",
|
||||
"compile": "clang -mtune=native -O3 -shared ./plus100.c -o plus100.dylib",
|
||||
"bench": "bun run bench-bun && bun run bench-napi && bun run bench-deno"
|
||||
}
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
import { run, bench, group, baseline } from "mitata";
|
||||
import { dlopen, suffix } from "bun:ffi";
|
||||
import { readdirSync } from "fs";
|
||||
|
||||
const {
|
||||
symbols: {
|
||||
plus100: { native: plus100 },
|
||||
noop,
|
||||
},
|
||||
close,
|
||||
} = dlopen(`./plus100.dylib`, {
|
||||
plus100: {
|
||||
args: ["int32_t"],
|
||||
returns: "int32_t",
|
||||
},
|
||||
noop: {
|
||||
args: [],
|
||||
},
|
||||
});
|
||||
const {
|
||||
plus100: plus100napi,
|
||||
noop: noopNapi,
|
||||
} = require("./plus100-napi/index.js");
|
||||
|
||||
group("plus100", () => {
|
||||
bench("plus100(1) ffi", () => {
|
||||
plus100(1);
|
||||
});
|
||||
|
||||
bench("plus100(1) napi", () => {
|
||||
plus100napi(1);
|
||||
});
|
||||
});
|
||||
|
||||
group("noop", () => {
|
||||
bench("noop() ffi", () => {
|
||||
noop();
|
||||
});
|
||||
|
||||
bench("noop() napi", () => {
|
||||
noopNapi();
|
||||
});
|
||||
});
|
||||
|
||||
// collect option collects benchmark returned values into array
|
||||
// prevents gc and can help with jit optimizing out functions
|
||||
await run({ collect: false, percentiles: true });
|
||||
console.log("\n");
|
||||
|
||||
if (plus100(1) !== 101) {
|
||||
throw new Error("plus100(1) !== 101");
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
// clang -mtune=native -O3 -shared ./plus100.c -o plus100.dylib
|
||||
#include <stdint.h>
|
||||
|
||||
int32_t plus100(int32_t a);
|
||||
int32_t plus100(int32_t a) { return a + 100; }
|
||||
|
||||
void noop(void);
|
||||
void noop(void) {}
|
||||
@@ -1,30 +0,0 @@
|
||||
import { run, bench, group, baseline } from "https://esm.sh/mitata";
|
||||
|
||||
const {
|
||||
symbols: { plus100: plus100, noop },
|
||||
close,
|
||||
} = Deno.dlopen("./plus100.dylib", {
|
||||
plus100: {
|
||||
parameters: ["i32"],
|
||||
result: "i32",
|
||||
},
|
||||
noop: {
|
||||
parameters: [],
|
||||
result: "void",
|
||||
},
|
||||
});
|
||||
bench("plus100(1) ", () => {
|
||||
plus100(1);
|
||||
});
|
||||
|
||||
bench("noop() ", () => {
|
||||
noop();
|
||||
});
|
||||
|
||||
// collect option collects benchmark returned values into array
|
||||
// prevents gc and can help with jit optimizing out functions
|
||||
await run({ collect: false, percentiles: true });
|
||||
|
||||
if (plus100(1) !== 101) {
|
||||
throw new Error("plus100(1) !== 101");
|
||||
}
|
||||
Binary file not shown.
@@ -1,19 +0,0 @@
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
const { plus100, noop } =
|
||||
"Bun" in globalThis
|
||||
? require("./plus100-napi")
|
||||
: (await import("module")).createRequire(import.meta.url)("./plus100-napi");
|
||||
|
||||
bench("plus100(1) napi", () => {
|
||||
plus100(1);
|
||||
});
|
||||
bench("noop() napi", () => {
|
||||
noop();
|
||||
});
|
||||
await run({ collect: false, percentiles: true });
|
||||
console.log("\n");
|
||||
|
||||
if (plus100(1) !== 101) {
|
||||
throw new Error("plus100(1) !== 101");
|
||||
}
|
||||
@@ -4,10 +4,10 @@ This benchmarks bundler performance for CSS hot reloading.
|
||||
|
||||
## Results
|
||||
|
||||
bun is 14x faster than Next.js at hot reloading CSS.
|
||||
Bun is 14x faster than Next.js at hot reloading CSS.
|
||||
|
||||
```
|
||||
bun v0.0.34
|
||||
Bun v0.0.34
|
||||
Saving every 16ms
|
||||
|
||||
Frame time:
|
||||
@@ -40,7 +40,7 @@ The intent is to be as accurate as possible. Measuring times reported client-sid
|
||||
|
||||
It works like this:
|
||||
|
||||
1. `browser.js` loads either bun or Next.js and a Chromium instance opened to the correct webpage
|
||||
1. `browser.js` loads either Bun or Next.js and a Chromium instance opened to the correct webpage
|
||||
2. `color-looper.zig` updates [`./src/colors.css`](./src/colors.css) in a loop up to `1024` times (1024 is arbitrary), sleeping every `16`ms or `32`ms (a CLI arg you can pass it). The `var(--timestamp)` CSS variable contains the UTC timestamp with precision of milliseconds and one extra decimal point
|
||||
3. `color-looper.zig` automatically records the screen via `screencapture` (builtin on macOS) and saves it, along with a `BigUint64Array` containing all the expected timestamps. When it's done, it writes to a designated file on disk which `browser.js` picks up as the signal to close the browser.
|
||||
4. `ffmpeg` converts each frame into a black and white `.tif` file, which `tesseract` then OCRs
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
// import { Buffer } from "buffer";
|
||||
var buf = new Buffer(1024);
|
||||
// var buf = new Uint8Array(1024);
|
||||
var view = new DataView(buf.buffer);
|
||||
var INTERVAL = 9999999;
|
||||
var time = (name, fn) => {
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
|
||||
console.time(name.padEnd("DataView.readBigUInt64 (LE)".length));
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
console.timeEnd(name.padEnd("DataView.readBigUInt64 (LE)".length));
|
||||
};
|
||||
|
||||
console.log(
|
||||
`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`,
|
||||
"\n"
|
||||
);
|
||||
var array = new Uint8Array(1024);
|
||||
time("Buffer[] ", () => buf[0]);
|
||||
time("Uint8Array[]", () => array[0]);
|
||||
console.log("");
|
||||
|
||||
time("Buffer.getBigInt64BE ", () => buf.readBigInt64BE(0));
|
||||
time("DataView.getBigInt64 (BE)", () => view.getBigInt64(0, false));
|
||||
console.log("");
|
||||
|
||||
time("Buffer.readBigInt64LE ", () => buf.readBigInt64LE(0));
|
||||
time("DataView.readBigInt64 (LE)", () => view.getBigInt64(0, true));
|
||||
console.log("");
|
||||
time("Buffer.getBigUInt64BE ", () => buf.readBigUInt64BE(0));
|
||||
time("DataView.getBigUInt64 (BE)", () => view.getBigUint64(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readBigUInt64LE ", () => buf.readBigUInt64LE(0));
|
||||
time("DataView.readBigUInt64 (LE)", () => view.getBigUint64(0, true));
|
||||
console.log("");
|
||||
time("Buffer.getDoubleBE ", () => buf.readDoubleBE(0));
|
||||
time("DataView.getDouble (BE)", () => view.getFloat64(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readDoubleLE ", () => buf.readDoubleLE(0));
|
||||
time("DataView.readDouble (LE)", () => view.getFloat64(0, true));
|
||||
console.log("");
|
||||
time("Buffer.getFloatBE ", () => buf.readFloatBE(0));
|
||||
time("DataView.getFloat (BE)", () => view.getFloat32(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readFloatLE ", () => buf.readFloatLE(0));
|
||||
time("DataView.readFloat (LE)", () => view.getFloat32(0, true));
|
||||
console.log("");
|
||||
time("Buffer.getInt16BE ", () => buf.readInt16BE(0));
|
||||
time("DataView.getInt16 (BE)", () => view.getInt16(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readInt16LE ", () => buf.readInt16LE(0));
|
||||
time("DataView.readInt16 (LE)", () => view.getInt16(0, true));
|
||||
console.log("");
|
||||
time("Buffer.getInt32BE ", () => buf.readInt32BE(0));
|
||||
time("DataView.getInt32 (BE)", () => view.getInt32(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readInt32LE ", () => buf.readInt32LE(0));
|
||||
time("DataView.readInt32 (LE)", () => view.getInt32(0, true));
|
||||
console.log("");
|
||||
time("Buffer.readInt8 ", () => buf.readInt8(0));
|
||||
time("DataView.readInt (t8)", () => view.getInt8(0));
|
||||
console.log("");
|
||||
time("Buffer.getUInt16BE ", () => buf.readUInt16BE(0));
|
||||
time("DataView.getUInt16 (BE)", () => view.getUint16(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readUInt16LE ", () => buf.readUInt16LE(0));
|
||||
time("DataView.readUInt16 (LE)", () => view.getUint16(0, true));
|
||||
console.log("");
|
||||
time("Buffer.getUInt32BE ", () => buf.readUInt32BE(0));
|
||||
time("DataView.getUInt32 (BE)", () => view.getUint32(0, false));
|
||||
console.log("");
|
||||
time("Buffer.readUInt32LE ", () => buf.readUInt32LE(0));
|
||||
time("DataView.getUInt32 (LE)", () => view.getUint32(0, true));
|
||||
console.log("");
|
||||
time("Buffer.readUInt8 ", () => buf.readUInt8(0));
|
||||
time("DataView.getUInt (t8)", () => view.getUint8(0));
|
||||
console.log("");
|
||||
@@ -1,136 +0,0 @@
|
||||
// import { Buffer } from "buffer";
|
||||
var buf = new Buffer(1024);
|
||||
var view = new DataView(buf.buffer);
|
||||
var INTERVAL = 9999999;
|
||||
var time = (name, fn) => {
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
|
||||
console.time(name.padEnd('Buffer.write(string, "latin1")'.length));
|
||||
for (let i = 0; i < INTERVAL; i++) fn();
|
||||
console.timeEnd(name.padEnd('Buffer.write(string, "latin1")'.length));
|
||||
};
|
||||
|
||||
console.log(
|
||||
`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`,
|
||||
"\n"
|
||||
);
|
||||
|
||||
const stringToWrite = "hellooooohellooooo";
|
||||
|
||||
time('Buffer.write(string, "utf8")', () => buf.write(stringToWrite, "utf8"));
|
||||
time('Buffer.write(string, "ascii")', () => buf.write(stringToWrite, "ascii"));
|
||||
time('Buffer.write(string, "latin1")', () =>
|
||||
buf.write(stringToWrite, "latin1")
|
||||
);
|
||||
time("Buffer.readBigInt64BE ", () => buf.readBigInt64BE(0));
|
||||
// time("DataView.getBigInt64 (BE)", () => view.getBigInt64(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readBigInt64LE ", () => buf.readBigInt64LE(0));
|
||||
// time("DataView.readBigInt64 (LE)", () => view.getBigInt64(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readBigUInt64BE ", () => buf.readBigUInt64BE(0));
|
||||
// time("DataView.getBigUInt64 (BE)", () => view.getBigUint64(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readBigUInt64LE ", () => buf.readBigUInt64LE(0));
|
||||
// time("DataView.readBigUInt64 (LE)", () => view.getBigUint64(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readDoubleBE ", () => buf.readDoubleBE(0));
|
||||
// time("DataView.getDouble (BE)", () => view.getFloat64(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readDoubleLE ", () => buf.readDoubleLE(0));
|
||||
// time("DataView.readDouble (LE)", () => view.getFloat64(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readFloatBE ", () => buf.readFloatBE(0));
|
||||
// time("DataView.getFloat (BE)", () => view.getFloat32(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readFloatLE ", () => buf.readFloatLE(0));
|
||||
// time("DataView.readFloat (LE)", () => view.getFloat32(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readInt16BE ", () => buf.readInt16BE(0));
|
||||
// time("DataView.getInt16 (BE)", () => view.getInt16(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readInt16LE ", () => buf.readInt16LE(0));
|
||||
// time("DataView.readInt16 (LE)", () => view.getInt16(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readInt32BE ", () => buf.readInt32BE(0));
|
||||
// time("DataView.getInt32 (BE)", () => view.getInt32(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readInt32LE ", () => buf.readInt32LE(0));
|
||||
// time("DataView.readInt32 (LE)", () => view.getInt32(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readInt8 ", () => buf.readInt8(0));
|
||||
// time("DataView.readInt (t8)", () => view.getInt8(0));
|
||||
// console.log("");
|
||||
time("Buffer.readUInt16BE ", () => buf.readUInt16BE(0));
|
||||
// time("DataView.getUInt16 (BE)", () => view.getUint16(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readUInt16LE ", () => buf.readUInt16LE(0));
|
||||
// time("DataView.readUInt16 (LE)", () => view.getUint16(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readUInt32BE ", () => buf.readUInt32BE(0));
|
||||
// time("DataView.getUInt32 (BE)", () => view.getUint32(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.readUInt32LE ", () => buf.readUInt32LE(0));
|
||||
// time("DataView.getUInt32 (LE)", () => view.getUint32(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.readUInt8 ", () => buf.readUInt8(0));
|
||||
// time("DataView.getUInt (t8)", () => view.getUint8(0));
|
||||
// console.log("");
|
||||
|
||||
time("Buffer.writeBigInt64BE", () => buf.writeBigInt64BE(BigInt(0), 0));
|
||||
// time("DataView.getBigInt64 (BE)", () => view.getBigInt64(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeBigInt64LE", () => buf.writeBigInt64LE(BigInt(0), 0));
|
||||
// time("DataView.readBigInt64 (LE)", () => view.getBigInt64(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeBigUInt64BE", () => buf.writeBigUInt64BE(BigInt(0), 0));
|
||||
// time("DataView.getBigUInt64 (BE)", () => view.getBigUint64(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeBigUInt64LE", () => buf.writeBigUInt64LE(BigInt(0), 0));
|
||||
// time("DataView.readBigUInt64 (LE)", () => view.getBigUint64(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeDoubleBE ", () => buf.writeDoubleBE(0, 0));
|
||||
// time("DataView.getDouble (BE)", () => view.getFloat64(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeDoubleLE ", () => buf.writeDoubleLE(0, 0));
|
||||
// time("DataView.readDouble (LE)", () => view.getFloat64(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeFloatBE ", () => buf.writeFloatBE(0, 0));
|
||||
// time("DataView.getFloat (BE)", () => view.getFloat32(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeFloatLE ", () => buf.writeFloatLE(0, 0));
|
||||
// time("DataView.readFloat (LE)", () => view.getFloat32(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeInt16BE ", () => buf.writeInt16BE(0, 0));
|
||||
// time("DataView.getInt16 (BE)", () => view.getInt16(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeInt16LE ", () => buf.writeInt16LE(0, 0));
|
||||
// time("DataView.readInt16 (LE)", () => view.getInt16(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeInt32BE ", () => buf.writeInt32BE(0, 0));
|
||||
// time("DataView.getInt32 (BE)", () => view.getInt32(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeInt32LE ", () => buf.writeInt32LE(0, 0));
|
||||
// time("DataView.readInt32 (LE)", () => view.getInt32(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeInt8 ", () => buf.writeInt8(0, 0));
|
||||
// time("DataView.readInt (t8)", () => view.getInt8(0));
|
||||
// console.log("");
|
||||
time("Buffer.writeUInt16BE ", () => buf.writeUInt16BE(0, 0));
|
||||
// time("DataView.getUInt16 (BE)", () => view.getUint16(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeUInt16LE ", () => buf.writeUInt16LE(0, 0));
|
||||
// time("DataView.readUInt16 (LE)", () => view.getUint16(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeUInt32BE ", () => buf.writeUInt32BE(0, 0));
|
||||
// time("DataView.getUInt32 (BE)", () => view.getUint32(0, false));
|
||||
// console.log("");
|
||||
time("Buffer.writeUInt32LE ", () => buf.writeUInt32LE(0, 0));
|
||||
// time("DataView.getUInt32 (LE)", () => view.getUint32(0, true));
|
||||
// console.log("");
|
||||
time("Buffer.writeUInt8 ", () => buf.writeUInt8(0, 0));
|
||||
// time("DataView.getUInt (t8)", () => view.getUint8(0));
|
||||
// console.log("");
|
||||
|
||||
buf.writeUInt8(10, 10);
|
||||
console.assert(buf.readUInt8(10, 10) === 10);
|
||||
@@ -1,78 +0,0 @@
|
||||
const iterations = 1_000;
|
||||
|
||||
export var report = {
|
||||
async: 0,
|
||||
callback: 0,
|
||||
sync: 0,
|
||||
then: 0,
|
||||
};
|
||||
|
||||
const tests = {
|
||||
callback(n, cb) {
|
||||
if (n === iterations) return cb();
|
||||
tests.callback(1 + n, () => cb());
|
||||
},
|
||||
|
||||
sync(n) {
|
||||
if (n === iterations) return;
|
||||
|
||||
tests.sync(1 + n);
|
||||
},
|
||||
|
||||
async async(n) {
|
||||
if (n === iterations) return;
|
||||
|
||||
await tests.async(1 + n);
|
||||
},
|
||||
|
||||
then(n) {
|
||||
if (n === iterations) return;
|
||||
return Promise.resolve(1 + n).then(tests.then);
|
||||
},
|
||||
};
|
||||
|
||||
async function test(log) {
|
||||
{
|
||||
const a = performance.now();
|
||||
await tests.async(0);
|
||||
if (log)
|
||||
console.log(
|
||||
`async/await: ${(report.async = (performance.now() - a).toFixed(4))}ms`
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
const a = performance.now();
|
||||
tests.callback(0, function () {
|
||||
if (log)
|
||||
console.log(
|
||||
`callback: ${(report.callback = (performance.now() - a).toFixed(
|
||||
4
|
||||
))}ms`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const a = performance.now();
|
||||
await tests.then(0);
|
||||
if (log)
|
||||
console.log(
|
||||
`then: ${(report.then = (performance.now() - a).toFixed(4))}ms`
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
const a = performance.now();
|
||||
tests.sync(0);
|
||||
if (log)
|
||||
console.log(
|
||||
`sync: ${(report.sync = (performance.now() - a).toFixed(4))}ms`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let warmup = 10;
|
||||
while (warmup--) await test();
|
||||
|
||||
await test(true);
|
||||
@@ -1,5 +0,0 @@
|
||||
import { resolve } from "path";
|
||||
const { write, stdout, file } = Bun;
|
||||
const input = resolve(process.argv[process.argv.length - 1]);
|
||||
|
||||
await write(stdout, file(input));
|
||||
@@ -1,6 +0,0 @@
|
||||
// works in both bun & node
|
||||
import { readFileSync } from "node:fs";
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
const arg = process.argv.slice(1);
|
||||
for (let i = 0; i < count; i++)
|
||||
console.log(arg.map((file) => readFileSync(file, "utf8")).join(""));
|
||||
@@ -1,4 +0,0 @@
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
const input = path.resolve(process.argv[process.argv.length - 1]);
|
||||
fs.createReadStream(input).pipe(process.stdout);
|
||||
@@ -1,4 +0,0 @@
|
||||
import path from "path";
|
||||
const input = path.resolve(process.argv[process.argv.length - 2]);
|
||||
const output = path.resolve(process.argv[process.argv.length - 1]);
|
||||
await Bun.write(Bun.file(output), Bun.file(input));
|
||||
@@ -1,3 +0,0 @@
|
||||
import { copyFileSync } from "node:fs";
|
||||
const arg = process.argv.slice(2);
|
||||
copyFileSync(arg[0], arg[1]);
|
||||
@@ -1,6 +0,0 @@
|
||||
const { existsSync } = require("fs");
|
||||
const cwd = process.cwd();
|
||||
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
|
||||
for (let i = 0; i < count; i++) existsSync(cwd);
|
||||
@@ -1,20 +0,0 @@
|
||||
import { bench, group, run } from "mitata";
|
||||
import { gzipSync, gunzipSync } from "bun";
|
||||
|
||||
const data = new TextEncoder().encode("Hello World!".repeat(9999));
|
||||
|
||||
const compressed = gzipSync(data);
|
||||
|
||||
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
|
||||
gunzipSync(gzipSync(data));
|
||||
});
|
||||
|
||||
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
gzipSync(data);
|
||||
});
|
||||
|
||||
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
gunzipSync(compressed);
|
||||
});
|
||||
|
||||
run({ collect: false, percentiles: true });
|
||||
@@ -1,20 +0,0 @@
|
||||
import { bench, group, run } from "mitata";
|
||||
import { gzipSync, gunzipSync } from "zlib";
|
||||
|
||||
const data = new TextEncoder().encode("Hello World!".repeat(9999));
|
||||
|
||||
const compressed = gzipSync(data);
|
||||
|
||||
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
|
||||
gunzipSync(gzipSync(data));
|
||||
});
|
||||
|
||||
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
gzipSync(data);
|
||||
});
|
||||
|
||||
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
|
||||
gunzipSync(compressed);
|
||||
});
|
||||
|
||||
run({ collect: false, percentiles: true });
|
||||
@@ -1,29 +0,0 @@
|
||||
import React from "react";
|
||||
|
||||
export default (
|
||||
<>
|
||||
‍‍‍‍‍‍&zwjϑ≅≅≅≅≅≅≅≅
|
||||
üüüüüüü£åååååååå
|
||||
óóóóóóóΠμμμμμμμμ
|
||||
♣♣♣♣♣♣&clubs⊕¢¢¢¢¢¢¢¢
|
||||
ÏÏÏÏÏÏÏ‌þþþþþþþþ
|
||||
………………&hellip°<<<<<<<<
|
||||
λλλλλλ&lambdaρ⇓⇓⇓⇓⇓⇓⇓⇓
|
||||
⇔⇔⇔⇔⇔⇔&hArrΤ≠≠≠≠≠≠≠≠
|
||||
ûûûûûûû•⊥⊥⊥⊥⊥⊥⊥⊥
|
||||
ôôôôôôô¶óóóóóóóó
|
||||
⊂⊂⊂⊂⊂⊂&subáíííííííí
|
||||
¸¸¸¸¸¸¸«§§§§§§§§
|
||||
¨¨¨¨¨¨¨ς""""""""
|
||||
ÔÔÔÔÔÔÔ∫¥¥¥¥¥¥¥¥
|
||||
ςςςςςς&sigmafΕ««««««««
|
||||
♦♦♦♦♦♦&diamsÓªªªªªªªª
|
||||
∞∞∞∞∞∞&infin⇔⋅⋅⋅⋅⋅⋅⋅⋅
|
||||
òòòòòòòý××××××××
|
||||
⌈⌈⌈⌈⌈⌈&lceilωßßßßßßßß
|
||||
≥≥≥≥≥≥&geßüüüüüüüü
|
||||
ρρρρρρ&rhoš∝∝∝∝∝∝∝∝
|
||||
◊◊◊◊◊◊&lozχ↵↵↵↵↵↵↵↵
|
||||
ÈÈÈÈÈÈÈ ÅÅÅÅÅÅÅÅ
|
||||
</>
|
||||
);
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "snippets",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.16.10",
|
||||
"@babel/preset-react": "^7.16.7",
|
||||
"@swc/core": "^1.2.133",
|
||||
"esbuild": "^0.14.12"
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
import { realpathSync } from "node:fs";
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
const arg = process.argv[process.argv.length - 1];
|
||||
for (let i = 0; i < count; i++) realpathSync(arg);
|
||||
@@ -1,4 +0,0 @@
|
||||
import decoding from "./jsx-entity-decoding";
|
||||
import ReactDOMServer from "react-dom/server.browser";
|
||||
|
||||
console.log(ReactDOMServer.renderToString(decoding));
|
||||
Binary file not shown.
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"name": "scan",
|
||||
"dependencies": {
|
||||
"esbuild": "^0.14.11"
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { useParams } from "remix";
|
||||
import type { LoaderFunction, ActionFunction } from "remix";
|
||||
|
||||
export const loader: LoaderFunction = async ({ params }) => {
|
||||
console.log(params.postId);
|
||||
};
|
||||
|
||||
export const action: ActionFunction = async ({ params }) => {
|
||||
console.log(params.postId);
|
||||
};
|
||||
|
||||
export default function PostRoute() {
|
||||
const params = useParams();
|
||||
console.log(params.postId);
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
|
||||
const transpiler = new Bun.Transpiler({
|
||||
loader: "ts",
|
||||
});
|
||||
|
||||
console.time("Get exports");
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
const imports = transpiler.scanImports(
|
||||
readFileSync("remix-route.ts", "utf8")
|
||||
);
|
||||
}
|
||||
console.timeEnd("Get exports");
|
||||
@@ -1,21 +0,0 @@
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
|
||||
const transpiler = new Bun.Transpiler({
|
||||
loader: "ts",
|
||||
});
|
||||
|
||||
console.time("Get exports");
|
||||
const file = readFileSync("remix-route.ts", "utf8");
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
const { imports, exports } = transpiler.scan(file);
|
||||
|
||||
for (let j = 0; j < fixture.length; j++) {
|
||||
if (fixture[j] !== exports[j]) {
|
||||
throw new Error("Mismatch");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.timeEnd("Get exports");
|
||||
@@ -1,42 +0,0 @@
|
||||
import { build, buildSync } from "esbuild";
|
||||
import { readFileSync } from "fs";
|
||||
const fixture = ["action", "default", "loader"];
|
||||
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
|
||||
|
||||
const opts = {
|
||||
metafile: true,
|
||||
format: "esm",
|
||||
platform: "neutral",
|
||||
write: false,
|
||||
logLevel: "silent",
|
||||
stdin: {
|
||||
contents: readFileSync("remix-route.ts", "utf8"),
|
||||
loader: "ts",
|
||||
sourcefile: "remix-route.js",
|
||||
},
|
||||
};
|
||||
|
||||
const getExports = ({ metafile }) => {
|
||||
for (let i = 0; i < fixture.length; i++) {
|
||||
if (fixture[i] !== metafile.outputs["stdin.js"].exports[i]) {
|
||||
throw new Error("Mismatch");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
console.time("Get exports");
|
||||
|
||||
if (!process.env.SYNC) {
|
||||
var promises = new Array(ITERATIONS);
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
promises[i] = build(opts).then(getExports);
|
||||
}
|
||||
|
||||
await Promise.all(promises);
|
||||
} else {
|
||||
for (let i = 0; i < ITERATIONS; i++) {
|
||||
getExports(buildSync(opts));
|
||||
}
|
||||
}
|
||||
|
||||
console.timeEnd("Get exports");
|
||||
@@ -1,58 +0,0 @@
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
var transformSync;
|
||||
var transform;
|
||||
var opts;
|
||||
if (process.isBun) {
|
||||
const transpiler = new Bun.Transpiler({ loader: "jsx" });
|
||||
transformSync = transpiler.transformSync.bind(transpiler);
|
||||
transform = transpiler.transform.bind(transpiler);
|
||||
opts = "jsx";
|
||||
} else if (process.env["esbuild"]) {
|
||||
try {
|
||||
const esbuild = await import("esbuild");
|
||||
transformSync = esbuild.transformSync;
|
||||
transform = esbuild.transform;
|
||||
opts = { loader: "jsx" };
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
} else if (process.env["swc"]) {
|
||||
try {
|
||||
const swc = await import("@swc/core");
|
||||
transformSync = swc.transformSync;
|
||||
transform = swc.transform;
|
||||
opts = {
|
||||
sourceMaps: false,
|
||||
inlineSourcesContent: false,
|
||||
jsc: {
|
||||
target: "es2022",
|
||||
parser: {
|
||||
jsx: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
} else if (process.env["babel"]) {
|
||||
try {
|
||||
const swc = await import("@babel/core");
|
||||
transformSync = swc.transformSync;
|
||||
transform = swc.transform;
|
||||
opts = {
|
||||
sourceMaps: false,
|
||||
presets: [(await import("@babel/preset-react")).default],
|
||||
};
|
||||
} catch (exception) {
|
||||
throw exception;
|
||||
}
|
||||
}
|
||||
|
||||
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
|
||||
|
||||
if (process.env.ASYNC) {
|
||||
console.log(await transform(code, opts));
|
||||
} else {
|
||||
console.log(transformSync(code, opts));
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
rm -rf Northwind_large.sqlite.zip
|
||||
wget https://github.com/jpwhite3/northwind-SQLite3/blob/master/Northwind_large.sqlite.zip
|
||||
unzip Northwind_large.sqlite.zip
|
||||
rm Northwind_large.sqlite.zip
|
||||
mv Northwind_large.sqlite /tmp/northwind.sqlite
|
||||
1071
bench/sqlite/package-lock.json
generated
1071
bench/sqlite/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"name": "bench",
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^7.5.1",
|
||||
"mitata": "^0.0.14"
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { createRequire } from "module";
|
||||
const db = createRequire(import.meta.url)("better-sqlite3")(
|
||||
"/tmp/northwind.sqlite"
|
||||
);
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Order"`);
|
||||
|
||||
bench('SELECT * FROM "Order"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Product"`);
|
||||
|
||||
bench('SELECT * FROM "Product"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "OrderDetail"`);
|
||||
|
||||
bench('SELECT * FROM "OrderDetail"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
run({ json: false });
|
||||
@@ -1,26 +0,0 @@
|
||||
import { DB } from "https://deno.land/x/sqlite/mod.ts";
|
||||
import { bench, run } from "https://esm.run/mitata";
|
||||
const db = new DB("/tmp/northwind.sqlite");
|
||||
|
||||
{
|
||||
const sql = db.prepareQuery(`SELECT * FROM "Order"`);
|
||||
bench('SELECT * FROM "Order"', () => {
|
||||
sql.allEntries();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepareQuery(`SELECT * FROM "Product"`);
|
||||
bench('SELECT * FROM "Product"', () => {
|
||||
sql.allEntries();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepareQuery(`SELECT * FROM "OrderDetail"`);
|
||||
bench('SELECT * FROM "OrderDetail"', () => {
|
||||
sql.allEntries();
|
||||
});
|
||||
}
|
||||
|
||||
run({ json: false });
|
||||
@@ -1,26 +0,0 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { Database } from "bun:sqlite";
|
||||
const db = Database.open("/tmp/northwind.sqlite");
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Order"`);
|
||||
bench('SELECT * FROM "Order"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Product"`);
|
||||
bench('SELECT * FROM "Product"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "OrderDetail"`);
|
||||
bench('SELECT * FROM "OrderDetail"', () => {
|
||||
sql.all();
|
||||
});
|
||||
}
|
||||
|
||||
run({ json: false });
|
||||
748
build.zig
748
build.zig
@@ -1,25 +1,17 @@
|
||||
const std = @import("std");
|
||||
const resolve_path = @import("./src/resolver/resolve_path.zig");
|
||||
|
||||
fn pkgPath(comptime out: []const u8) std.build.FileSource {
|
||||
const outpath = comptime std.fs.path.dirname(@src().file).? ++ std.fs.path.sep_str ++ out;
|
||||
return .{ .path = outpath };
|
||||
}
|
||||
pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void {
|
||||
const picohttp = step.addPackage(.{
|
||||
.name = "picohttp",
|
||||
.path = .{ .path = "src/deps/picohttp.zig" },
|
||||
});
|
||||
|
||||
step.addIncludeDir("src/deps");
|
||||
|
||||
if (with_obj) {
|
||||
step.addObjectFile("src/deps/picohttpparser.o");
|
||||
}
|
||||
|
||||
step.addIncludeDir("src/deps");
|
||||
|
||||
if (with_obj) {
|
||||
step.addObjectFile(panicIfNotFound("src/deps/picohttpparser.o"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libssl.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libcrypto.a"));
|
||||
}
|
||||
|
||||
// step.add("/Users/jarred/Code/WebKit/WebKitBuild/Release/lib/libWTF.a");
|
||||
|
||||
// ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON"
|
||||
@@ -27,188 +19,18 @@ pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void
|
||||
// homebrew-provided icu4c
|
||||
}
|
||||
|
||||
const color_map = std.ComptimeStringMap([]const u8, .{
|
||||
&.{ "black", "30m" },
|
||||
&.{ "blue", "34m" },
|
||||
&.{ "b", "1m" },
|
||||
&.{ "d", "2m" },
|
||||
&.{ "cyan", "36m" },
|
||||
&.{ "green", "32m" },
|
||||
&.{ "magenta", "35m" },
|
||||
&.{ "red", "31m" },
|
||||
&.{ "white", "37m" },
|
||||
&.{ "yellow", "33m" },
|
||||
});
|
||||
|
||||
fn addInternalPackages(step: *std.build.LibExeObjStep, _: std.mem.Allocator, target: anytype) !void {
|
||||
var boringssl: std.build.Pkg = .{
|
||||
.name = "boringssl",
|
||||
.path = pkgPath("src/boringssl.zig"),
|
||||
};
|
||||
|
||||
var datetime: std.build.Pkg = .{
|
||||
.name = "datetime",
|
||||
.path = pkgPath("src/deps/zig-datetime/src/datetime.zig"),
|
||||
};
|
||||
|
||||
var thread_pool: std.build.Pkg = .{
|
||||
.name = "thread_pool",
|
||||
.path = pkgPath("src/thread_pool.zig"),
|
||||
};
|
||||
|
||||
var crash_reporter: std.build.Pkg = .{
|
||||
.name = "crash_reporter",
|
||||
.path = pkgPath("src/deps/backtrace.zig"),
|
||||
};
|
||||
|
||||
var picohttp: std.build.Pkg = .{
|
||||
.name = "picohttp",
|
||||
.path = pkgPath("src/deps/picohttp.zig"),
|
||||
};
|
||||
|
||||
var io_darwin: std.build.Pkg = .{
|
||||
.name = "io",
|
||||
.path = pkgPath("src/io/io_darwin.zig"),
|
||||
};
|
||||
var io_linux: std.build.Pkg = .{
|
||||
.name = "io",
|
||||
.path = pkgPath("src/io/io_linux.zig"),
|
||||
};
|
||||
var io_stub: std.build.Pkg = .{
|
||||
.name = "io",
|
||||
.path = pkgPath("src/io/io_stub.zig"),
|
||||
};
|
||||
|
||||
var lol_html: std.build.Pkg = .{
|
||||
.name = "lolhtml",
|
||||
.path = pkgPath("src/deps/lol-html.zig"),
|
||||
};
|
||||
|
||||
var io = if (target.isDarwin())
|
||||
io_darwin
|
||||
else if (target.isLinux())
|
||||
io_linux
|
||||
else
|
||||
io_stub;
|
||||
|
||||
var strings: std.build.Pkg = .{
|
||||
.name = "strings",
|
||||
.path = pkgPath("src/string_immutable.zig"),
|
||||
};
|
||||
|
||||
var clap: std.build.Pkg = .{
|
||||
.name = "clap",
|
||||
.path = pkgPath("src/deps/zig-clap/clap.zig"),
|
||||
};
|
||||
|
||||
var http: std.build.Pkg = .{
|
||||
.name = "http",
|
||||
.path = pkgPath("src/http_client_async.zig"),
|
||||
};
|
||||
|
||||
var javascript_core_real: std.build.Pkg = .{
|
||||
.name = "javascript_core",
|
||||
.path = pkgPath("src/jsc.zig"),
|
||||
};
|
||||
|
||||
var javascript_core_stub: std.build.Pkg = .{
|
||||
.name = "javascript_core",
|
||||
.path = pkgPath("src/jsc_stub.zig"),
|
||||
};
|
||||
|
||||
var uws: std.build.Pkg = .{
|
||||
.name = "uws",
|
||||
.path = pkgPath("src/deps/uws.zig"),
|
||||
};
|
||||
|
||||
var javascript_core = if (target.getOsTag() == .freestanding)
|
||||
javascript_core_stub
|
||||
else
|
||||
javascript_core_real;
|
||||
|
||||
var analytics: std.build.Pkg = .{
|
||||
.name = "analytics",
|
||||
.path = pkgPath("src/analytics.zig"),
|
||||
};
|
||||
|
||||
io.dependencies = &.{analytics};
|
||||
|
||||
javascript_core.dependencies = &.{ http, strings, picohttp, io, uws };
|
||||
http.dependencies = &.{
|
||||
strings,
|
||||
picohttp,
|
||||
io,
|
||||
boringssl,
|
||||
thread_pool,
|
||||
};
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
http.dependencies = &.{
|
||||
strings,
|
||||
picohttp,
|
||||
io,
|
||||
boringssl,
|
||||
thread_pool,
|
||||
};
|
||||
thread_pool.dependencies = &.{ io, http };
|
||||
|
||||
thread_pool.dependencies = &.{
|
||||
io,
|
||||
http,
|
||||
};
|
||||
|
||||
step.addPackage(thread_pool);
|
||||
step.addPackage(picohttp);
|
||||
step.addPackage(io);
|
||||
step.addPackage(strings);
|
||||
step.addPackage(clap);
|
||||
step.addPackage(http);
|
||||
step.addPackage(boringssl);
|
||||
step.addPackage(javascript_core);
|
||||
step.addPackage(crash_reporter);
|
||||
step.addPackage(datetime);
|
||||
step.addPackage(lol_html);
|
||||
step.addPackage(uws);
|
||||
}
|
||||
var output_dir: []const u8 = "";
|
||||
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
|
||||
var file = std.fs.cwd().openFile(filepath, .{ .mode = .read_only }) catch |err| {
|
||||
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
|
||||
var file = std.fs.cwd().openFile(filepath, .{ .read = true }) catch |err| {
|
||||
const linux_only = "\nOn Linux, you'll need to compile libiconv manually and copy the .a file into src/deps.";
|
||||
|
||||
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`." ++ linux_only, .{ filepath, @errorName(err) });
|
||||
};
|
||||
file.close();
|
||||
|
||||
return filepath;
|
||||
}
|
||||
|
||||
fn updateRuntime() anyerror!void {
|
||||
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
|
||||
const runtime_hash = std.hash.Wyhash.hash(
|
||||
0,
|
||||
try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
|
||||
);
|
||||
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
|
||||
defer runtime_version_file.close();
|
||||
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
|
||||
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
|
||||
const fallback_hash = std.hash.Wyhash.hash(
|
||||
0,
|
||||
try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
|
||||
);
|
||||
|
||||
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
|
||||
|
||||
fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
|
||||
|
||||
fallback_version_file.close();
|
||||
}
|
||||
|
||||
var x64 = "x64";
|
||||
var mode: std.builtin.Mode = undefined;
|
||||
|
||||
const Builder = std.build.Builder;
|
||||
const CrossTarget = std.zig.CrossTarget;
|
||||
const Mode = std.builtin.Mode;
|
||||
const fs = std.fs;
|
||||
|
||||
pub fn build(b: *std.build.Builder) !void {
|
||||
// Standard target options allows the person running `zig build` to choose
|
||||
// what target to build for. Here we do not override the defaults, which
|
||||
@@ -217,8 +39,9 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
var target = b.standardTargetOptions(.{});
|
||||
// Standard release options allow the person running `zig build` to select
|
||||
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
|
||||
mode = b.standardReleaseOptions();
|
||||
const mode = b.standardReleaseOptions();
|
||||
|
||||
var cwd_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
|
||||
const cwd: []const u8 = b.pathFromRoot(".");
|
||||
var exe: *std.build.LibExeObjStep = undefined;
|
||||
var output_dir_buf = std.mem.zeroes([4096]u8);
|
||||
@@ -232,12 +55,10 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
if (std.mem.eql(u8, os_tagname, "macos")) {
|
||||
os_tagname = "darwin";
|
||||
if (arch.isAARCH64()) {
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
target.os_version_min = std.build.Target.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
} else if (arch.isX86()) {
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
|
||||
target.os_version_min = std.build.Target.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
|
||||
}
|
||||
} else if (target.isLinux()) {
|
||||
target.setGnuLibCVersion(2, 27, 0);
|
||||
}
|
||||
|
||||
std.mem.copy(
|
||||
@@ -259,319 +80,318 @@ pub fn build(b: *std.build.Builder) !void {
|
||||
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
|
||||
|
||||
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
|
||||
output_dir = b.pathFromRoot(output_dir_base);
|
||||
const output_dir = b.pathFromRoot(output_dir_base);
|
||||
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
|
||||
exe = b.addExecutable(bun_executable_name, if (target.getOsTag() == std.Target.Os.Tag.freestanding)
|
||||
"src/main_wasm.zig"
|
||||
else
|
||||
"src/main.zig");
|
||||
|
||||
if (target.getOsTag() == .wasi) {
|
||||
exe.enable_wasmtime = true;
|
||||
exe = b.addExecutable(bun_executable_name, "src/main_wasi.zig");
|
||||
exe.linkage = .dynamic;
|
||||
exe.setOutputDir(output_dir);
|
||||
} else if (target.getCpuArch().isWasm()) {
|
||||
// exe = b.addExecutable(
|
||||
// "bun",
|
||||
// "src/main_wasm.zig",
|
||||
// );
|
||||
// exe.is_linking_libc = false;
|
||||
// exe.is_dynamic = true;
|
||||
var lib = b.addExecutable(bun_executable_name, "src/main_wasm.zig");
|
||||
lib.single_threaded = true;
|
||||
// exe.want_lto = true;
|
||||
// exe.linkLibrary(lib);
|
||||
|
||||
if (mode == std.builtin.Mode.Debug) {
|
||||
// exception_handling
|
||||
var features = target.getCpuFeatures();
|
||||
features.addFeature(2);
|
||||
target.updateCpuFeatures(&features);
|
||||
} else {
|
||||
// lib.strip = true;
|
||||
}
|
||||
|
||||
lib.setOutputDir(output_dir);
|
||||
lib.want_lto = true;
|
||||
b.install_path = lib.getOutputSource().getPath(b);
|
||||
|
||||
std.debug.print("Build: ./{s}\n", .{b.install_path});
|
||||
b.default_step.dependOn(&lib.step);
|
||||
b.verbose_link = true;
|
||||
lib.setTarget(target);
|
||||
lib.setBuildMode(mode);
|
||||
|
||||
std.fs.deleteTreeAbsolute(std.fs.path.join(b.allocator, &.{ cwd, lib.getOutputSource().getPath(b) }) catch unreachable) catch {};
|
||||
var install = b.getInstallStep();
|
||||
lib.strip = false;
|
||||
lib.install();
|
||||
|
||||
const run_cmd = lib.run();
|
||||
run_cmd.step.dependOn(b.getInstallStep());
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
|
||||
return;
|
||||
} else {
|
||||
exe = b.addExecutable(bun_executable_name, "src/main.zig");
|
||||
}
|
||||
// exe.setLibCFile("libc.txt");
|
||||
exe.linkLibC();
|
||||
// exe.linkLibCpp();
|
||||
exe.addPackage(.{
|
||||
.name = "clap",
|
||||
.path = .{ .path = "src/deps/zig-clap/clap.zig" },
|
||||
});
|
||||
|
||||
exe.setOutputDir(output_dir);
|
||||
updateRuntime() catch {};
|
||||
var cwd_dir = std.fs.cwd();
|
||||
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .read = true });
|
||||
const runtime_hash = std.hash.Wyhash.hash(
|
||||
0,
|
||||
try runtime_out_file.readToEndAlloc(b.allocator, try runtime_out_file.getEndPos()),
|
||||
);
|
||||
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
|
||||
defer runtime_version_file.close();
|
||||
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
|
||||
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .read = true });
|
||||
const fallback_hash = std.hash.Wyhash.hash(
|
||||
0,
|
||||
try fallback_out_file.readToEndAlloc(b.allocator, try fallback_out_file.getEndPos()),
|
||||
);
|
||||
|
||||
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
|
||||
|
||||
fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
|
||||
|
||||
defer fallback_version_file.close();
|
||||
|
||||
exe.setTarget(target);
|
||||
exe.setBuildMode(mode);
|
||||
b.install_path = output_dir;
|
||||
|
||||
var typings_exe = b.addExecutable("typescript-decls", "src/typegen.zig");
|
||||
|
||||
const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
|
||||
target.getOsVersionMin().semver
|
||||
else .{ .major = 0, .minor = 0, .patch = 0 };
|
||||
|
||||
const max_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
|
||||
target.getOsVersionMax().semver
|
||||
else .{ .major = 0, .minor = 0, .patch = 0 };
|
||||
var javascript = b.addExecutable("spjs", "src/main_javascript.zig");
|
||||
var typings_exe = b.addExecutable("typescript-decls", "src/javascript/jsc/typescript.zig");
|
||||
javascript.setMainPkgPath(b.pathFromRoot("."));
|
||||
typings_exe.setMainPkgPath(b.pathFromRoot("."));
|
||||
exe.setMainPkgPath(b.pathFromRoot("."));
|
||||
|
||||
// exe.want_lto = true;
|
||||
defer b.default_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
|
||||
defer b.default_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
.{
|
||||
triplet,
|
||||
min_version,
|
||||
max_version,
|
||||
},
|
||||
).step);
|
||||
if (!target.getCpuArch().isWasm()) {
|
||||
b.default_step.dependOn(&exe.step);
|
||||
|
||||
var obj_step = b.step("obj", "Build bun as a .o file");
|
||||
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
|
||||
|
||||
{
|
||||
obj.setTarget(target);
|
||||
addPicoHTTP(obj, false);
|
||||
obj.setMainPkgPath(b.pathFromRoot("."));
|
||||
|
||||
var opts = b.addOptions();
|
||||
opts.addOption(
|
||||
bool,
|
||||
"bindgen",
|
||||
false,
|
||||
);
|
||||
obj.addOptions("build_options", opts);
|
||||
|
||||
try addInternalPackages(
|
||||
obj,
|
||||
const bindings_dir = std.fs.path.join(
|
||||
b.allocator,
|
||||
target,
|
||||
);
|
||||
&.{
|
||||
cwd,
|
||||
"src",
|
||||
"javascript",
|
||||
"jsc",
|
||||
"bindings-obj",
|
||||
},
|
||||
) catch unreachable;
|
||||
|
||||
var bindings_dir_ = cwd_dir.openDir(bindings_dir, .{ .iterate = true }) catch std.debug.panic("Error opening bindings directory. Please make sure you ran `make jsc`. {s} should exist", .{bindings_dir});
|
||||
var bindings_walker = bindings_dir_.walk(b.allocator) catch std.debug.panic("Error reading bindings directory {s}", .{bindings_dir});
|
||||
|
||||
var bindings_files = std.ArrayList([]const u8).init(b.allocator);
|
||||
|
||||
while (bindings_walker.next() catch unreachable) |entry| {
|
||||
if (std.mem.eql(u8, std.fs.path.extension(entry.basename), ".o")) {
|
||||
bindings_files.append(bindings_dir_.realpathAlloc(b.allocator, entry.path) catch unreachable) catch unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
// // References:
|
||||
// // - https://github.com/mceSystems/node-jsc/blob/master/deps/jscshim/webkit.gyp
|
||||
// // - https://github.com/mceSystems/node-jsc/blob/master/deps/jscshim/docs/webkit_fork_and_compilation.md#webkit-port-and-compilation
|
||||
// const flags = [_][]const u8{
|
||||
// "-Isrc/JavaScript/jsc/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders",
|
||||
// "-Isrc/JavaScript/jsc/WebKit/WebKitBuild/Release/WTF/Headers",
|
||||
// "-Isrc/javascript/jsc/WebKit/WebKitBuild/Release/ICU/Headers",
|
||||
// "-DSTATICALLY_LINKED_WITH_JavaScriptCore=1",
|
||||
// "-DSTATICALLY_LINKED_WITH_WTF=1",
|
||||
// "-DBUILDING_WITH_CMAKE=1",
|
||||
// "-DNOMINMAX",
|
||||
// "-DENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
|
||||
// "-DBUILDING_JSCONLY__",
|
||||
// "-DASSERT_ENABLED=0", // missing symbol errors like this will happen "JSC::DFG::DoesGCCheck::verifyCanGC(JSC::VM&)"
|
||||
// "-Isrc/JavaScript/jsc/WebKit/WebKitBuild/Release/", // config.h,
|
||||
// "-Isrc/JavaScript/jsc/bindings/",
|
||||
// "-Isrc/javascript/jsc/WebKit/Source/bmalloc",
|
||||
// "-std=gnu++17",
|
||||
// if (target.getOsTag() == .macos) "-DUSE_FOUNDATION=1" else "",
|
||||
// if (target.getOsTag() == .macos) "-DUSE_CF_RETAIN_PTR=1" else "",
|
||||
// };
|
||||
const headers_step = b.step("headers-obj", "JSC headers Step #1");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/javascript/jsc/bindings/bindings-generator.zig");
|
||||
headers_obj.setMainPkgPath(javascript.main_pkg_path.?);
|
||||
headers_step.dependOn(&headers_obj.step);
|
||||
|
||||
{
|
||||
obj_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
b.default_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}",
|
||||
.{
|
||||
triplet,
|
||||
min_version,
|
||||
max_version,
|
||||
target.getOsVersionMin().semver,
|
||||
target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
}
|
||||
b.default_step.dependOn(&exe.step);
|
||||
|
||||
obj_step.dependOn(&obj.step);
|
||||
|
||||
obj.setOutputDir(output_dir);
|
||||
obj.setBuildMode(mode);
|
||||
obj.linkLibC();
|
||||
if (mode == std.builtin.Mode.Debug)
|
||||
obj.emit_llvm_ir = .{
|
||||
.emit_to = try std.fmt.allocPrint(b.allocator, "{s}/{s}.ll", .{ output_dir, bun_executable_name }),
|
||||
};
|
||||
|
||||
obj.strip = false;
|
||||
obj.bundle_compiler_rt = true;
|
||||
obj.omit_frame_pointer = false;
|
||||
|
||||
b.default_step.dependOn(&obj.step);
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
|
||||
var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
|
||||
log_step.step.dependOn(&obj.step);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("headers-obj", "Build JavaScriptCore headers");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/bindgen.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
var opts = b.addOptions();
|
||||
opts.addOption(
|
||||
bool,
|
||||
"bindgen",
|
||||
true,
|
||||
);
|
||||
headers_obj.addOptions("build_options", opts);
|
||||
headers_obj.linkLibCpp();
|
||||
}
|
||||
|
||||
{
|
||||
const wasm = b.step("bun-wasm", "Build WASM");
|
||||
var wasm_step: *std.build.LibExeObjStep = b.addStaticLibrary("bun-wasm", "src/main_wasm.zig");
|
||||
defer wasm.dependOn(&wasm_step.step);
|
||||
wasm_step.strip = false;
|
||||
// wasm_step.link_function_sections = true;
|
||||
// wasm_step.link_emit_relocs = true;
|
||||
// wasm_step.single_threaded = true;
|
||||
try configureObjectStep(b, wasm_step, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("httpbench-obj", "Build HTTPBench tool (object files)");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("httpbench", "misctools/http_bench.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("fetch-obj", "Build fetch (object files)");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("fetch", "misctools/fetch.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("string-bench", "Build string bench");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addExecutable("string-bench", "src/bench/string-handling.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("sha-bench-obj", "Build sha bench");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("sha", "src/sha.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("vlq-bench", "Build vlq bench");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addExecutable("vlq-bench", "src/sourcemap/vlq_bench.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("tgz-obj", "Build tgz (object files)");
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addObject("tgz", "misctools/tgz.zig");
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
}
|
||||
|
||||
{
|
||||
const headers_step = b.step("test", "Build test");
|
||||
|
||||
var test_file = b.option([]const u8, "test-file", "Input file for test");
|
||||
var test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
|
||||
var test_filter = b.option([]const u8, "test-filter", "Filter for test");
|
||||
|
||||
var headers_obj: *std.build.LibExeObjStep = b.addTest(test_file orelse "src/main.zig");
|
||||
headers_obj.setFilter(test_filter);
|
||||
if (test_bin_) |test_bin| {
|
||||
headers_obj.name = std.fs.path.basename(test_bin);
|
||||
if (std.fs.path.dirname(test_bin)) |dir| headers_obj.setOutputDir(dir);
|
||||
}
|
||||
|
||||
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
|
||||
try linkObjectFiles(b, headers_obj, target);
|
||||
{
|
||||
var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"});
|
||||
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{});
|
||||
headers_step.dependOn(&before.step);
|
||||
headers_step.dependOn(&headers_obj.step);
|
||||
headers_step.dependOn(&after.step);
|
||||
var steps = [_]*std.build.LibExeObjStep{ exe, javascript, typings_exe };
|
||||
|
||||
// const single_threaded = b.option(bool, "single-threaded", "Build single-threaded") orelse false;
|
||||
|
||||
for (steps) |step, i| {
|
||||
step.linkLibC();
|
||||
step.linkLibCpp();
|
||||
addPicoHTTP(
|
||||
step,
|
||||
true,
|
||||
);
|
||||
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libJavaScriptCore.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libWTF.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libcrypto.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libbmalloc.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libarchive.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libs2n.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/zlib/libz.a"));
|
||||
step.addObjectFile(panicIfNotFound("src/deps/mimalloc/libmimalloc.a"));
|
||||
step.addLibPath("src/deps/mimalloc");
|
||||
step.addIncludeDir("src/deps/mimalloc");
|
||||
|
||||
// step.single_threaded = single_threaded;
|
||||
|
||||
if (target.getOsTag() == .macos) {
|
||||
const homebrew_prefix = comptime if (std.Target.current.cpu.arch == .aarch64)
|
||||
"/opt/homebrew/"
|
||||
else
|
||||
"/usr/local/";
|
||||
|
||||
// We must link ICU statically
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib/libicudata.a"));
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib/libicui18n.a"));
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib/libicuuc.a"));
|
||||
step.addObjectFile(panicIfNotFound(homebrew_prefix ++ "opt/libiconv/lib/libiconv.a"));
|
||||
|
||||
// icucore is a weird macOS only library
|
||||
step.linkSystemLibrary("icucore");
|
||||
step.addLibPath(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/lib"));
|
||||
step.addIncludeDir(panicIfNotFound(homebrew_prefix ++ "opt/icu4c/include"));
|
||||
} else {
|
||||
step.linkSystemLibrary("icuuc");
|
||||
step.linkSystemLibrary("icudata");
|
||||
step.linkSystemLibrary("icui18n");
|
||||
step.addObjectFile(panicIfNotFound("src/deps/libiconv.a"));
|
||||
}
|
||||
|
||||
for (bindings_files.items) |binding| {
|
||||
step.addObjectFile(
|
||||
binding,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (headers_obj.packages.items) |pkg_| {
|
||||
const pkg: std.build.Pkg = pkg_;
|
||||
if (std.mem.eql(u8, pkg.name, "clap")) continue;
|
||||
var test_ = b.addTestSource(pkg.path);
|
||||
{
|
||||
var obj_step = b.step("obj", "Build Bun as a .o file");
|
||||
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
|
||||
obj.setTarget(target);
|
||||
addPicoHTTP(obj, false);
|
||||
obj.addPackage(.{
|
||||
.name = "clap",
|
||||
.path = .{ .path = "src/deps/zig-clap/clap.zig" },
|
||||
});
|
||||
|
||||
test_.setMainPkgPath(obj.main_pkg_path.?);
|
||||
test_.setTarget(target);
|
||||
try linkObjectFiles(b, test_, target);
|
||||
if (pkg.dependencies) |children| {
|
||||
test_.packages = std.ArrayList(std.build.Pkg).init(b.allocator);
|
||||
try test_.packages.appendSlice(children);
|
||||
{
|
||||
obj_step.dependOn(&b.addLog(
|
||||
"Build {s} v{} - v{}\n",
|
||||
.{
|
||||
triplet,
|
||||
obj.target.getOsVersionMin().semver,
|
||||
obj.target.getOsVersionMax().semver,
|
||||
},
|
||||
).step);
|
||||
}
|
||||
|
||||
var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
|
||||
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{});
|
||||
headers_step.dependOn(&before.step);
|
||||
headers_step.dependOn(&test_.step);
|
||||
headers_step.dependOn(&after.step);
|
||||
obj_step.dependOn(&obj.step);
|
||||
|
||||
obj.setOutputDir(output_dir);
|
||||
obj.setBuildMode(mode);
|
||||
obj.linkLibC();
|
||||
obj.linkLibCpp();
|
||||
|
||||
obj.strip = false;
|
||||
obj.bundle_compiler_rt = true;
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
headers_obj.setTarget(target);
|
||||
headers_obj.addPackage(.{
|
||||
.name = "clap",
|
||||
.path = .{ .path = "src/deps/zig-clap/clap.zig" },
|
||||
});
|
||||
|
||||
headers_obj.setOutputDir(output_dir);
|
||||
headers_obj.setBuildMode(mode);
|
||||
headers_obj.linkLibC();
|
||||
headers_obj.linkLibCpp();
|
||||
headers_obj.bundle_compiler_rt = true;
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
headers_obj.link_emit_relocs = true;
|
||||
headers_obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
b.default_step.dependOn(&exe.step);
|
||||
}
|
||||
|
||||
try configureObjectStep(b, typings_exe, target, obj.main_pkg_path.?);
|
||||
try linkObjectFiles(b, typings_exe, target);
|
||||
javascript.strip = false;
|
||||
javascript.packages = std.ArrayList(std.build.Pkg).fromOwnedSlice(b.allocator, b.allocator.dupe(std.build.Pkg, exe.packages.items) catch unreachable);
|
||||
|
||||
javascript.setOutputDir(output_dir);
|
||||
javascript.setBuildMode(mode);
|
||||
|
||||
const run_cmd = exe.run();
|
||||
run_cmd.step.dependOn(b.getInstallStep());
|
||||
if (b.args) |args| {
|
||||
run_cmd.addArgs(args);
|
||||
}
|
||||
|
||||
const run_step = b.step("run", "Run the app");
|
||||
run_step.dependOn(&run_cmd.step);
|
||||
|
||||
var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
|
||||
log_step.step.dependOn(&exe.step);
|
||||
|
||||
var typings_cmd: *std.build.RunStep = typings_exe.run();
|
||||
typings_cmd.cwd = cwd;
|
||||
typings_cmd.addArg(cwd);
|
||||
typings_cmd.addArg("types");
|
||||
typings_cmd.step.dependOn(&typings_exe.step);
|
||||
if (target.getOsTag() == .macos) {
|
||||
typings_exe.linkSystemLibrary("icucore");
|
||||
typings_exe.linkSystemLibrary("iconv");
|
||||
typings_exe.addLibPath(
|
||||
"/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/lib",
|
||||
);
|
||||
}
|
||||
|
||||
typings_exe.linkLibC();
|
||||
typings_exe.linkLibCpp();
|
||||
typings_exe.setMainPkgPath(cwd);
|
||||
|
||||
var typings_step = b.step("types", "Build TypeScript types");
|
||||
typings_step.dependOn(&typings_cmd.step);
|
||||
|
||||
var javascript_cmd = b.step("spjs", "Build standalone JavaScript runtime. Must run \"make jsc\" first.");
|
||||
javascript_cmd.dependOn(&javascript.step);
|
||||
}
|
||||
|
||||
pub var original_make_fn: ?fn (step: *std.build.Step) anyerror!void = null;
|
||||
|
||||
// Due to limitations in std.build.Builder
|
||||
// we cannot use this with debugging
|
||||
// so I am leaving this here for now, with the eventual intent to switch to std.build.Builder
|
||||
// but it is dead code
|
||||
pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, target: anytype) !void {
|
||||
if (target.getOsTag() == .freestanding)
|
||||
return;
|
||||
var dirs_to_search = std.BoundedArray([]const u8, 32).init(0) catch unreachable;
|
||||
const arm_brew_prefix: []const u8 = "/opt/homebrew";
|
||||
const x86_brew_prefix: []const u8 = "/usr/local";
|
||||
try dirs_to_search.append(b.env_map.get("BUN_DEPS_OUT_DIR") orelse b.env_map.get("BUN_DEPS_DIR") orelse @as([]const u8, b.pathFromRoot("src/deps")));
|
||||
if (target.getOsTag() == .macos) {
|
||||
if (target.getCpuArch().isAARCH64()) {
|
||||
try dirs_to_search.append(comptime arm_brew_prefix ++ "/opt/icu4c/lib/");
|
||||
} else {
|
||||
try dirs_to_search.append(comptime x86_brew_prefix ++ "/opt/icu4c/lib/");
|
||||
}
|
||||
}
|
||||
|
||||
if (b.env_map.get("JSC_LIB")) |jsc| {
|
||||
try dirs_to_search.append(jsc);
|
||||
}
|
||||
|
||||
var added = std.AutoHashMap(u64, void).init(b.allocator);
|
||||
|
||||
const files_we_care_about = std.ComptimeStringMap([]const u8, .{
|
||||
.{ "libmimalloc.o", "libmimalloc.o" },
|
||||
.{ "libz.a", "libz.a" },
|
||||
.{ "libarchive.a", "libarchive.a" },
|
||||
.{ "libssl.a", "libssl.a" },
|
||||
.{ "picohttpparser.o", "picohttpparser.o" },
|
||||
.{ "libcrypto.boring.a", "libcrypto.boring.a" },
|
||||
.{ "libicuuc.a", "libicuuc.a" },
|
||||
.{ "libicudata.a", "libicudata.a" },
|
||||
.{ "libicui18n.a", "libicui18n.a" },
|
||||
.{ "libJavaScriptCore.a", "libJavaScriptCore.a" },
|
||||
.{ "libWTF.a", "libWTF.a" },
|
||||
.{ "libbmalloc.a", "libbmalloc.a" },
|
||||
.{ "libbacktrace.a", "libbacktrace.a" },
|
||||
.{ "liblolhtml.a", "liblolhtml.a" },
|
||||
.{ "uSockets.a", "uSockets.a" },
|
||||
});
|
||||
|
||||
for (dirs_to_search.slice()) |deps_path| {
|
||||
var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch @panic("Failed to open dependencies directory");
|
||||
var iterator = deps_dir.iterate();
|
||||
obj.addIncludeDir(deps_path);
|
||||
obj.addLibPath(deps_path);
|
||||
|
||||
while (iterator.next() catch null) |entr| {
|
||||
const entry: std.fs.Dir.Entry = entr;
|
||||
if (files_we_care_about.get(entry.name)) |obj_name| {
|
||||
var has_added = try added.getOrPut(std.hash.Wyhash.hash(0, obj_name));
|
||||
if (!has_added.found_existing) {
|
||||
var paths = [_][]const u8{ deps_path, obj_name };
|
||||
obj.addObjectFile(try std.fs.path.join(b.allocator, &paths));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn configureObjectStep(_: *std.build.Builder, obj: *std.build.LibExeObjStep, target: anytype, main_pkg_path: []const u8) !void {
|
||||
obj.setMainPkgPath(main_pkg_path);
|
||||
|
||||
obj.setTarget(target);
|
||||
try addInternalPackages(obj, std.heap.page_allocator, target);
|
||||
if (target.getOsTag() != .freestanding)
|
||||
addPicoHTTP(obj, false);
|
||||
|
||||
obj.strip = false;
|
||||
obj.setOutputDir(output_dir);
|
||||
obj.setBuildMode(mode);
|
||||
if (target.getOsTag() != .freestanding) obj.linkLibC();
|
||||
if (target.getOsTag() != .freestanding) obj.bundle_compiler_rt = true;
|
||||
|
||||
if (target.getOsTag() == .linux) {
|
||||
// obj.want_lto = tar;
|
||||
obj.link_emit_relocs = true;
|
||||
obj.link_eh_frame_hdr = true;
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,149 +1,90 @@
|
||||
# This is terribly complicated
|
||||
# It's because:
|
||||
# 1. bun run has to have dynamic completions
|
||||
# 2. there are global options
|
||||
# 3. bun {install add remove} gets special options
|
||||
# 4. I don't know how to write fish completions well
|
||||
# Contributions very welcome!!
|
||||
|
||||
function __fish__get_bun_bins
|
||||
string split ' ' (bun getcompletes b)
|
||||
string split ' ' (bun getcompletes b)
|
||||
end
|
||||
|
||||
function __fish__get_bun_scripts
|
||||
set -lx SHELL bash
|
||||
set -lx MAX_DESCRIPTION_LEN 40
|
||||
string trim (string split '\n' (string split '\t' (bun getcompletes z)))
|
||||
end
|
||||
|
||||
function __fish__get_bun_packages
|
||||
if test (commandline -ct) != ""
|
||||
set -lx SHELL fish
|
||||
string split ' ' (bun getcompletes a (commandline -ct))
|
||||
end
|
||||
end
|
||||
|
||||
function __history_completions
|
||||
set -l tokens (commandline --current-process --tokenize)
|
||||
history --prefix (commandline) | string replace -r \^$tokens[1]\\s\* "" | string replace -r \^$tokens[2]\\s\* "" | string split ' '
|
||||
set -lx SHELL bash
|
||||
set -lx MAX_DESCRIPTION_LEN 40
|
||||
string trim (string split '\n' (string split '\t' (bun getcompletes z)))
|
||||
end
|
||||
|
||||
function __fish__get_bun_bun_js_files
|
||||
string split ' ' (bun getcompletes j)
|
||||
string split ' ' (bun getcompletes j)
|
||||
end
|
||||
|
||||
function bun_fish_is_nth_token --description 'Test if current token is on Nth place' --argument-names n
|
||||
set -l tokens (commandline -poc)
|
||||
set -l tokens (string replace -r --filter '^([^-].*)' '$1' -- $tokens)
|
||||
test (count $tokens) -eq "$n"
|
||||
set -l tokens (commandline -poc)
|
||||
set -l tokens (string replace -r --filter '^([^-].*)' '$1' -- $tokens)
|
||||
test (count $tokens) -eq "$n"
|
||||
end
|
||||
|
||||
function __bun_command_count --argument-names n
|
||||
set -l cmds (commandline -poc)
|
||||
set -l cmds (commandline -poc)
|
||||
|
||||
test (count cmds) -eq "$n"
|
||||
test (count cmds) -eq "$n"
|
||||
end
|
||||
|
||||
function __bun_last_cmd --argument-names n
|
||||
set -l cmds (commandline -poc)
|
||||
set -l cmds (commandline -poc)
|
||||
|
||||
test "(cmds[-1])" = "$n"
|
||||
test "(cmds[-1])" = "$n"
|
||||
end
|
||||
|
||||
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose global
|
||||
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependenices" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
|
||||
|
||||
set -l bun_builtin_cmds dev create help bun upgrade discord run install remove add
|
||||
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add
|
||||
set -l bun_builtin_cmds_without_bun dev create help upgrade run discord install remove add
|
||||
set -l bun_builtin_cmds_without_create dev help bun upgrade discord run install remove add
|
||||
set -l bun_builtin_cmds_without_install create dev help bun upgrade discord run remove add
|
||||
set -l bun_builtin_cmds_without_remove create dev help bun upgrade discord run install add
|
||||
set -l bun_builtin_cmds_without_add create dev help bun upgrade discord run remove install
|
||||
set -l bun_builtin_cmds_without_pm create dev help bun upgrade discord run
|
||||
set -l bun_builtin_cmds dev create help bun upgrade discord run
|
||||
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord
|
||||
set -l bun_builtin_cmds_without_bun dev create help upgrade run discord
|
||||
set -l bun_builtin_cmds_without_create dev help bun upgrade discord run
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bins)' -d 'package bin'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bins)' -d 'package bin'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_scripts)' -d 'script'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bun_js_files)' -d 'Bun.js'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from run" -a '(__fish__get_bun_bun_js_files)' -d 'Bun.js'
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'run' -f -d 'Run a script or bin'
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'run' -f -d 'Run a script or bin'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'u' -l 'origin' -r -d 'Server URL. Rewrites import paths'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'u' -l 'origin' -r -d 'Server URL. Rewrites import paths'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'p' -l 'port' -r -d 'Port number to start server from'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'p' -l 'port' -r -d 'Port number to start server from'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'd' -l 'define' -r -d 'Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\"'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'd' -l 'define' -r -d 'Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\"'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'e' -l 'external' -r -d 'Exclude module from transpilation (can use * wildcards). ex: -e react'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -s 'e' -l 'external' -r -d 'Exclude module from transpilation (can use * wildcards). ex: -e react'
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -l 'use' -r -d 'Use a framework (ex: next)'
|
||||
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts);" --no-files -l 'use' -r -d 'Use a framework (ex: next)'
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'dev' -d 'Start dev server'
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts) and __fish_use_subcommand" -a 'dev' -d 'Start dev server'
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'create' -f -d 'Create a new project'
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'create' -f -d 'Create a new project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'next' -d 'new Next.js project'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'next' -d 'new Next.js project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'react' -d 'new React project'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create next react; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from create;" -a 'react' -d 'new React project'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'upgrade' -d 'Upgrade bun to the latest version' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'upgrade' -d 'Upgrade Bun to the latest version' -x
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '--help' -d 'See all commands and flags' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a '--help' -d 'See all commands and flags' -x
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'bun\'s version' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'Bun\'s version' -x
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open bun\'s Discord server' -x
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open Bun\'s Discord server' -x
|
||||
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); __fish_use_subcommand" -a 'bun' -d 'Generate a new bundle'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); __fish_use_subcommand" -a 'bun' -d 'Generate a new bundle'
|
||||
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from bun" -F -d 'Bundle this'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_bun; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from bun" -F -d 'Bundle this'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from react; or __fish_seen_subcommand_from next" -F -d "Create in directory"
|
||||
|
||||
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'install' -f -d 'Install packages from package.json'
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'add' -F -d 'Add a package to package.json'
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand" -a 'remove' -F -d 'Remove a package from package.json'
|
||||
|
||||
complete -c bun \
|
||||
-n "bun_fish_is_nth_token 1; and not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) and __bun_command_count 1 and __fish_use_subcommand add remove" -F
|
||||
|
||||
|
||||
for i in (seq (count $bun_install_boolean_flags))
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from install add remove;" -l "$bun_install_boolean_flags[$i]" -d "$bun_install_boolean_flags_descriptions[$i]"
|
||||
end
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from install add remove;" -l 'cwd' -d 'Change working directory'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from install add remove;" -l 'cache-dir' -d 'Choose a cache directory (default: $HOME/.bun/install/cache)'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from add;" -d 'Popular' -a '(__fish__get_bun_packages)'
|
||||
|
||||
complete -c bun \
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from add;" -d 'History' -a '(__history_completions)'
|
||||
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_create; and not __fish_seen_subcommand_from (__fish__get_bun_bins); and not __fish_seen_subcommand_from (__fish__get_bun_scripts); and __fish_seen_subcommand_from react; or __fish_seen_subcommand_from next" -F -d "Create in directory"
|
||||
|
||||
complete -c bun --no-files
|
||||
@@ -15,54 +15,12 @@ _bun() {
|
||||
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
|
||||
compadd $scripts_list && ret=0
|
||||
|
||||
main_commands=('add\:"Add a dependency to package.json" bun\:"Generate a bundle" create\:"Create a new project" dev\:"Start a dev server" help\:"Show command help" install\:"Install packages from package.json" remove\:"Remove a dependency from package.json" run\:"Run a script or package bin" upgrade\:"Upgrade to the latest version of bun"')
|
||||
main_commands=('bun:"Generate a bundle" create:"Create a new project" dev:"Start a dev server" help:"Show command help" run:"Run a script or package bin" upgrade:"Upgrade to the latest version of Bun"')
|
||||
main_commands=($main_commands)
|
||||
_alternative "args:bun:(($main_commands))"
|
||||
_alternative "args:Bun:(($main_commands))"
|
||||
;;
|
||||
args)
|
||||
case $line[1] in
|
||||
add)
|
||||
|
||||
# ---- Command: add
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'*: :->package' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--registry[Change default registry (default: \$BUN_CONFIG_REGISTRY || \$npm_config_registry)]:registry' \
|
||||
'--token[Authentication token used for npm registry requests (default: \$npm_config_token)]:token' \
|
||||
'-y[Write a yarn.lock file (yarn v1)]' \
|
||||
'--yarn[Write a yarn.lock file (yarn v1)]' \
|
||||
'-g[Add a package globally]' \
|
||||
'--global[Add a package globally]' \
|
||||
'--production[Don'"'"'t install devDependencies]' \
|
||||
'--optional[Add dependency to optionalDependencies]' \
|
||||
'--development[Add dependency to devDependencies]' \
|
||||
'-d[Add dependency to devDependencies]' \
|
||||
'-p[Don'"'"'t install devDependencies]' \
|
||||
'--no-save[]' \
|
||||
'--dry-run[Don'"'"'t install anything]' \
|
||||
'--force[Always request the latest versions from the registry & reinstall all dependenices]' \
|
||||
'--lockfile[Store & load a lockfile at a specific filepath]:lockfile' \
|
||||
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
|
||||
'--no-cache[Ignore manifest cache entirely]' \
|
||||
'--silent[Don'"'"'t output anything]' \
|
||||
'--verbose[Excessively verbose logging]' \
|
||||
'--cwd[Set a specific cwd]:cwd' \
|
||||
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
|
||||
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
|
||||
ret=0
|
||||
|
||||
case $state in
|
||||
package)
|
||||
_bun_add_param_package_completion
|
||||
;;
|
||||
esac
|
||||
|
||||
;;
|
||||
bun)
|
||||
|
||||
# ---- Command: bun
|
||||
@@ -206,57 +164,6 @@ _bun() {
|
||||
ret=0
|
||||
|
||||
;;
|
||||
install)
|
||||
|
||||
# ---- Command: help install
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'2: :->cmd2' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-g[Add a package globally]' \
|
||||
'--global[Add a package globally]' \
|
||||
'-h[Show command help]' \
|
||||
'--all[]' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
remove)
|
||||
|
||||
# ---- Command: help remove
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'2: :->cmd2' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'-g[Remove a package globally]' \
|
||||
'--global[Remove a package globally]' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--all[]' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
run)
|
||||
|
||||
# ---- Command: help run
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'2: :->cmd2' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--cwd[Change directory]:cwd' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--all[]' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
create)
|
||||
|
||||
@@ -368,74 +275,6 @@ _bun() {
|
||||
;;
|
||||
|
||||
esac
|
||||
;;
|
||||
install)
|
||||
|
||||
# ---- Command: install
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--registry[Change default registry (default: \$BUN_CONFIG_REGISTRY || \$npm_config_registry)]:registry' \
|
||||
'--token[Authentication token used for npm registry requests (default: \$npm_config_token)]:token' \
|
||||
'-y[Write a yarn.lock file (yarn v1)]' \
|
||||
'--yarn[Write a yarn.lock file (yarn v1)]' \
|
||||
'--production[Don'"'"'t install devDependencies]' \
|
||||
'-p[Don'"'"'t install devDependencies]' \
|
||||
'--no-save[]' \
|
||||
'--dry-run[Don'"'"'t install anything]' \
|
||||
'--force[Always request the latest versions from the registry & reinstall all dependenices]' \
|
||||
'--lockfile[Store & load a lockfile at a specific filepath]:lockfile' \
|
||||
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
|
||||
'--no-cache[Ignore manifest cache entirely]' \
|
||||
'--silent[Don'"'"'t output anything]' \
|
||||
'--verbose[Excessively verbose logging]' \
|
||||
'--cwd[Set a specific cwd]:cwd' \
|
||||
'-g[Add a package globally]' \
|
||||
'--global[Add a package globally]' \
|
||||
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
|
||||
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
|
||||
ret=0
|
||||
|
||||
;;
|
||||
|
||||
remove)
|
||||
|
||||
# ---- Command: remove
|
||||
_arguments -s -C \
|
||||
'1: :->cmd1' \
|
||||
'*: :->package' \
|
||||
'--version[Show version and exit]' \
|
||||
'-V[Show version and exit]' \
|
||||
'--help[Show command help]' \
|
||||
'-h[Show command help]' \
|
||||
'--registry[Change default registry (default: \$BUN_CONFIG_REGISTRY || \$npm_config_registry)]:registry' \
|
||||
'--token[Authentication token used for npm registry requests (default: \$npm_config_token)]:token' \
|
||||
'-y[Write a yarn.lock file (yarn v1)]' \
|
||||
'--yarn[Write a yarn.lock file (yarn v1)]' \
|
||||
'--production[Don'"'"'t install devDependencies]' \
|
||||
'-p[Don'"'"'t install devDependencies]' \
|
||||
'--no-save[]' \
|
||||
'--dry-run[Don'"'"'t install anything]' \
|
||||
'-g[Remove a package globally]' \
|
||||
'--global[Remove a package globally]' \
|
||||
'--force[Always request the latest versions from the registry & reinstall all dependenices]' \
|
||||
'--lockfile[Store & load a lockfile at a specific filepath]:lockfile' \
|
||||
'--cache-dir[Store & load cached data from a specific directory path]:cache-dir' \
|
||||
'--no-cache[Ignore manifest cache entirely]' \
|
||||
'--silent[Don'"'"'t output anything]' \
|
||||
'--verbose[Excessively verbose logging]' \
|
||||
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
|
||||
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
|
||||
ret=0
|
||||
|
||||
case $state in
|
||||
package) ;;
|
||||
|
||||
esac
|
||||
|
||||
;;
|
||||
run)
|
||||
|
||||
@@ -508,39 +347,6 @@ _bun_run_param_script_completion() {
|
||||
fi
|
||||
}
|
||||
|
||||
_set_remove() {
|
||||
comm -23 <(echo $1 | sort | tr " " "\n") <(echo $2 | sort | tr " " "\n") 2>/dev/null
|
||||
}
|
||||
|
||||
_bun_add_param_package_completion() {
|
||||
|
||||
IFS=$'\n' inexact=($(history -n bun | grep -E "^bun add " | cut -c 9- | uniq))
|
||||
IFS=$'\n' exact=($($inexact | grep -E "^$words[$CURRENT]"))
|
||||
IFS=$'\n' packages=($(SHELL=zsh bun getcompletes a $words[$CURRENT]))
|
||||
|
||||
to_print=$inexact
|
||||
if [ ! -z "$exact" -a "$exact" != " " ]; then
|
||||
to_print=$exact
|
||||
fi
|
||||
|
||||
if [ ! -z "$to_print" -a "$to_print" != " " ]; then
|
||||
if [ ! -z "$packages" -a "$packages" != " " ]; then
|
||||
_describe -1 -t to_print 'History' to_print
|
||||
_describe -1 -t packages "Popular" packages
|
||||
return
|
||||
fi
|
||||
|
||||
_describe -1 -t to_print 'History' to_print
|
||||
return
|
||||
fi
|
||||
|
||||
if [ ! -z "$packages" -a "$packages" != " " ]; then
|
||||
_describe -1 -t packages "Popular" packages
|
||||
return
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
__bun_dynamic_comp() {
|
||||
local comp=""
|
||||
|
||||
|
||||
@@ -58,7 +58,7 @@ subcommands:
|
||||
summary: Use a framework, e.g. "next"
|
||||
|
||||
upgrade:
|
||||
summary: Upgrade to the latest version of bun
|
||||
summary: Upgrade to the latest version of Bun
|
||||
|
||||
dev:
|
||||
summary: Start a dev server
|
||||
@@ -102,122 +102,6 @@ subcommands:
|
||||
- &port name: port
|
||||
type: int
|
||||
summary: Port number
|
||||
install:
|
||||
summary: Install packages from package.json
|
||||
options:
|
||||
- name: registry
|
||||
type: string
|
||||
summary: "Change default registry (default: $BUN_CONFIG_REGISTRY || $npm_config_registry)"
|
||||
- name: token
|
||||
type: string
|
||||
summary: "Authentication token used for npm registry requests (default: $npm_config_token)"
|
||||
- y -- "Write a yarn.lock file (yarn v1)"
|
||||
- yarn -- "Write a yarn.lock file (yarn v1)"
|
||||
- production -- "Don't install devDependencies"
|
||||
- p -- "Don't install devDependencies"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- name: lockfile
|
||||
type: string
|
||||
summary: "Store & load a lockfile at a specific filepath"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
- name: cwd
|
||||
type: string
|
||||
summary: "Set a specific cwd"
|
||||
- name: backend
|
||||
summary: "Platform-specific optimizations for installing dependencies"
|
||||
type: string
|
||||
enum: ["clonefile", "copyfile", "hardlink", "clonefile_each_dir"]
|
||||
- name: link-native-bins
|
||||
summary: 'Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo'
|
||||
add:
|
||||
summary: Add a dependency to package.json
|
||||
options:
|
||||
- name: registry
|
||||
type: string
|
||||
summary: "Change default registry (default: $BUN_CONFIG_REGISTRY || $npm_config_registry)"
|
||||
- name: token
|
||||
type: string
|
||||
summary: "Authentication token used for npm registry requests (default: $npm_config_token)"
|
||||
- y -- "Write a yarn.lock file (yarn v1)"
|
||||
- yarn -- "Write a yarn.lock file (yarn v1)"
|
||||
- production -- "Don't install devDependencies"
|
||||
- optional -- "Add dependency to optionalDependencies"
|
||||
- development -- "Add dependency to devDependencies"
|
||||
- d -- "Add dependency to devDependencies"
|
||||
- p -- "Don't install devDependencies"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
- name: lockfile
|
||||
type: string
|
||||
summary: "Store & load a lockfile at a specific filepath"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
|
||||
- name: cwd
|
||||
type: string
|
||||
summary: "Set a specific cwd"
|
||||
- name: backend
|
||||
summary: "Platform-specific optimizations for installing dependencies"
|
||||
type: string
|
||||
enum: ["clonefile", "copyfile", "hardlink", "clonefile_each_dir"]
|
||||
- name: link-native-bins
|
||||
summary: 'Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo'
|
||||
parameters:
|
||||
- name: package
|
||||
multiple: true
|
||||
type: string
|
||||
required: true
|
||||
remove:
|
||||
summary: Remove a dependency from package.json
|
||||
options:
|
||||
- name: registry
|
||||
type: string
|
||||
summary: "Change default registry (default: $BUN_CONFIG_REGISTRY || $npm_config_registry)"
|
||||
- name: token
|
||||
type: string
|
||||
summary: "Authentication token used for npm registry requests (default: $npm_config_token)"
|
||||
- y -- "Write a yarn.lock file (yarn v1)"
|
||||
- yarn -- "Write a yarn.lock file (yarn v1)"
|
||||
- production -- "Don't install devDependencies"
|
||||
- p -- "Don't install devDependencies"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- name: lockfile
|
||||
type: string
|
||||
summary: "Store & load a lockfile at a specific filepath"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
- name: cwd
|
||||
type: string
|
||||
summary: "Set a specific cwd"
|
||||
- name: backend
|
||||
summary: "Platform-specific optimizations for installing dependencies"
|
||||
type: string
|
||||
enum: ["clonefile", "copyfile", "hardlink", "clonefile_each_dir"]
|
||||
- name: link-native-bins
|
||||
summary: 'Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo'
|
||||
parameters:
|
||||
- name: package
|
||||
multiple: true
|
||||
type: string
|
||||
required: true
|
||||
|
||||
parameters:
|
||||
- name: sasdasdds
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
# Bun-flavored TOML
|
||||
|
||||
[TOML](https://toml.io/) is a minimal configuration file format designed to be easy for humans to read.
|
||||
|
||||
Bun implements a TOML parser with a few tweaks designed for better interopability with INI files and with JavaScript.
|
||||
|
||||
### ; and # are comments
|
||||
|
||||
In Bun-flavored TOML, comments start with `#` or `;`
|
||||
|
||||
```ini
|
||||
# This is a comment
|
||||
; This is also a comment
|
||||
```
|
||||
|
||||
This matches the behavior of INI files.
|
||||
|
||||
In TOML, comments start with `#`
|
||||
|
||||
```toml
|
||||
# This is a comment
|
||||
```
|
||||
|
||||
### String escape characters
|
||||
|
||||
Bun-flavored adds a few more escape sequences to TOML to work better with JavaScript strings.
|
||||
|
||||
```
|
||||
# Bun-flavored TOML extras
|
||||
\x{XX} - ASCII (U+00XX)
|
||||
\u{x+} - unicode (U+0000000X) - (U+XXXXXXXX)
|
||||
\v - vertical tab
|
||||
|
||||
# Regular TOML
|
||||
\b - backspace (U+0008)
|
||||
\t - tab (U+0009)
|
||||
\n - linefeed (U+000A)
|
||||
\f - form feed (U+000C)
|
||||
\r - carriage return (U+000D)
|
||||
\" - quote (U+0022)
|
||||
\\ - backslash (U+005C)
|
||||
\uXXXX - unicode (U+XXXX)
|
||||
\UXXXXXXXX - unicode (U+XXXXXXXX)
|
||||
```
|
||||
@@ -1,57 +0,0 @@
|
||||
# Upgrading WebKit
|
||||
|
||||
Bun uses [a fork](https://github.com/Jarred-Sumner/WebKit) of WebKit with a small number of changes.
|
||||
|
||||
It's important to periodically update WebKit for many reasons:
|
||||
|
||||
- Security
|
||||
- Performance
|
||||
- Compatibility
|
||||
- …and many more.
|
||||
|
||||
To upgrade, first find the commit in **bun's WebKit fork** (not bun!) between when we last upgraded and now.
|
||||
|
||||
```bash
|
||||
cd src/javascript/jsc/WebKit # In the WebKit directory! not bun
|
||||
git checkout $COMMIT
|
||||
```
|
||||
|
||||
This is the main command to run:
|
||||
|
||||
```bash
|
||||
git pull https://github.com/WebKit/WebKit.git main --no-rebase --allow-unrelated-histories -X theirs
|
||||
```
|
||||
|
||||
Then, you will likely see some silly merge conflicts. Fix them and then run:
|
||||
|
||||
```bash
|
||||
# You might have to run this multiple times.
|
||||
rm -rf WebKitBuild
|
||||
|
||||
# Go to Bun's directory! Not WebKit.
|
||||
cd ../../../../
|
||||
make jsc-build-mac-compile
|
||||
```
|
||||
|
||||
Make sure that JSC's CLI is able to load successfully. This verifies that the build is working.
|
||||
|
||||
You know this worked when it printed help options. If it complains about symbols, crashes, or anything else that looks wrong, something is wrong.
|
||||
|
||||
```bash
|
||||
src/javascript/jsc/WebKit/WebKitBuild/Release/bin/jsc --help
|
||||
```
|
||||
|
||||
Then, clear out our bindings and regenerate the C++<>Zig headers:
|
||||
|
||||
```bash
|
||||
make clean-bindings jsc-bindings-headers generate-builtins
|
||||
```
|
||||
|
||||
Now update Bun's bindings wherever there are compiler errors:
|
||||
|
||||
```bash
|
||||
# It will take awhile if you don't pass -j here
|
||||
make jsc-bindings-mac -j10
|
||||
```
|
||||
|
||||
This is the hard part. It might involve digging through WebKit's commit history to figure out what changed and why. Fortunately, WebKit contributors write great commit messages.
|
||||
@@ -1,18 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-executable-page-protection</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
<key>com.apple.security.get-task-allow</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,7 +0,0 @@
|
||||
#[no_mangle]
|
||||
pub extern "C" fn add(a: i32, b: i32) -> i32 {
|
||||
a + b
|
||||
}
|
||||
|
||||
// to compile:
|
||||
// rustc --crate-type cdylib add.rs
|
||||
@@ -1,12 +0,0 @@
|
||||
import { dlopen, suffix } from "bun:ffi";
|
||||
|
||||
const {
|
||||
symbols: { add },
|
||||
} = dlopen(`./libadd.${suffix}`, {
|
||||
add: {
|
||||
args: ["i32", "i32"],
|
||||
returns: "i32",
|
||||
},
|
||||
});
|
||||
|
||||
console.log(add(1, 2));
|
||||
@@ -1,6 +0,0 @@
|
||||
pub export fn add(a: i32, b: i32) i32 {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
// to compile:
|
||||
// zig build-lib -OReleaseFast ./add.zig -dynamic --name add
|
||||
@@ -1,6 +0,0 @@
|
||||
import { resolve } from "path";
|
||||
const { write, stdout, file } = Bun;
|
||||
const { argv } = process;
|
||||
|
||||
const path = resolve(argv.at(-1));
|
||||
await write(stdout, file(path));
|
||||
@@ -1,11 +0,0 @@
|
||||
const sequence = [1, 2, 3];
|
||||
sequence.toReversed(); // => [3, 2, 1]
|
||||
sequence; // => [1, 2, 3]
|
||||
|
||||
const outOfOrder = new Uint8Array([3, 1, 2]);
|
||||
outOfOrder.toSorted(); // => Uint8Array [1, 2, 3]
|
||||
outOfOrder; // => Uint8Array [3, 1, 2]
|
||||
|
||||
const correctionNeeded = [1, 1, 3];
|
||||
correctionNeeded.with(1, 2); // => [1, 2, 3]
|
||||
correctionNeeded; // => [1, 1, 3]
|
||||
@@ -1,18 +0,0 @@
|
||||
// Accepts a string, TypedArray, or Blob (file blob supported is not implemented but planned)
|
||||
const input = "hello world".repeat(400);
|
||||
|
||||
// Bun.hash() defaults to Wyhash because it's fast
|
||||
console.log(Bun.hash(input));
|
||||
|
||||
console.log(Bun.hash.wyhash(input));
|
||||
// and returns a number
|
||||
// all of these hashing functions return numbers, not typed arrays.
|
||||
console.log(Bun.hash.adler32(input));
|
||||
console.log(Bun.hash.crc32(input));
|
||||
console.log(Bun.hash.cityHash32(input));
|
||||
console.log(Bun.hash.cityHash64(input));
|
||||
console.log(Bun.hash.murmur32v3(input));
|
||||
console.log(Bun.hash.murmur64v2(input));
|
||||
|
||||
// Second argument accepts a seed where relevant
|
||||
console.log(Bun.hash(input, 12345));
|
||||
@@ -1,47 +0,0 @@
|
||||
// Start a fast HTTP server from a function
|
||||
Bun.serve({
|
||||
async fetch(req) {
|
||||
const { pathname } = new URL(req.url);
|
||||
if (
|
||||
!(pathname.startsWith("/https://") || pathname.startsWith("/http://"))
|
||||
) {
|
||||
return new Response(
|
||||
"Enter a path that starts with https:// or http://\n",
|
||||
{
|
||||
status: 400,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
const response = await fetch(
|
||||
req.url.substring("http://localhost:3000/".length),
|
||||
req.clone()
|
||||
);
|
||||
|
||||
return new HTMLRewriter()
|
||||
.on("a[href]", {
|
||||
element(element: Element) {
|
||||
element.setAttribute(
|
||||
"href",
|
||||
"https://www.youtube.com/watch?v=dQw4w9WgXcQ"
|
||||
);
|
||||
},
|
||||
})
|
||||
.transform(response);
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
@@ -1,24 +0,0 @@
|
||||
const { serve, file, resolveSync } = Bun;
|
||||
const { path } = import.meta;
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
return new Response(file(new URL(req.url).pathname.substring(1)));
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
@@ -1,12 +0,0 @@
|
||||
import { serve } from "bun";
|
||||
|
||||
const server = serve({
|
||||
fetch(req) {
|
||||
return new Response(`Pending requests count: ${this.pendingRequests}`);
|
||||
},
|
||||
});
|
||||
|
||||
// Stop the server after 5 seconds
|
||||
setTimeout(() => {
|
||||
server.stop();
|
||||
}, 5000);
|
||||
@@ -1,34 +0,0 @@
|
||||
// Start a fast HTTP server from a function
|
||||
Bun.serve({
|
||||
fetch(req: Request) {
|
||||
return new Response(`Echo: ${req.url}`);
|
||||
},
|
||||
|
||||
// baseURI: "http://localhost:3000",
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(\n" + err.toString(), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables bun's default error handler
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
// Start a fast HTTP server from the main file's export
|
||||
// export default {
|
||||
// fetch(req) {
|
||||
// return new Response(
|
||||
// `This is another way to start a server!
|
||||
// if the main file export default's an object
|
||||
// with 'fetch'. Bun automatically calls Bun.serve`
|
||||
// );
|
||||
// },
|
||||
// // so autocomplete & type checking works
|
||||
// } as Bun.Serve;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user