Compare commits

..

43 Commits

Author SHA1 Message Date
Jarred Sumner
26ee46ad5b Bump 2021-10-28 19:05:27 -07:00
Jarred Sumner
417c4e0faa - Fix consistentcy issue with runtime hash
- Fix edgecases in strings.eqlComptime by simplifying the implementation
2021-10-28 19:03:49 -07:00
Jarred Sumner
b6675dd0ab Explicitly check content type 2021-10-28 17:33:40 -07:00
Jarred Sumner
dff40c5220 FIx webkit commit 2021-10-28 17:08:28 -07:00
Jarred Sumner
1fd426432e Fix spacing, add check that the version matches before we push 2021-10-28 17:06:42 -07:00
Jarred Sumner
97c43ef787 Don't run bun upgrade in the makefile 2021-10-28 17:02:27 -07:00
Jarred Sumner
a546e56669 Update Makefile 2021-10-28 16:55:22 -07:00
Jarred Sumner
4b52192974 Update upgrade_command.zig 2021-10-28 16:52:57 -07:00
Jarred Sumner
fb10e0c4cb Upgrade checker + polish for bun upgrade 2021-10-28 16:39:26 -07:00
Jarred Sumner
a44cb9708f New subcommand: bun upgrade. It upgrades bun to the latest version. 2021-10-28 05:34:38 -07:00
Jarred Sumner
c7fc08056a part 1 of try to fix the npm install process 2021-10-27 17:38:05 -07:00
Jarred Sumner
12ba37e43a Update build-id 2021-10-27 15:27:03 -07:00
Jarred Sumner
928346b842 Verify package installs successfully for each platform 2021-10-27 15:26:04 -07:00
Jarred SUmner
b81c895e1d Add test that verifies installing bun with yarn & npm works before publishing 2021-10-27 04:26:38 -07:00
Jarred Sumner
d434e5aeac Make the installation more careful 2021-10-27 04:12:13 -07:00
Jarred Sumner
2250db41b4 Add universal macOS build so that Bun works on rosetta builds of Node.js 2021-10-27 02:41:45 -07:00
Jarred Sumner
826db9021e Before publishing the binary to npm, verify it installs successfully and it's the expected version 2021-10-27 01:42:50 -07:00
Jarred Sumner
ad61279621 Fix aarch64 build errors 2021-10-27 01:22:02 -07:00
Jarred Sumner
5ce1b36130 Choose different clang for macOS and Linux 2021-10-26 22:57:49 -07:00
Jarred SUmner
a1f17f70eb Suggest running tests in build instructions 2021-10-26 22:38:36 -07:00
Jarred SUmner
a0f086ba85 Fix Linux x64 build 2021-10-26 22:37:03 -07:00
Jarred Sumner
8d095a235f Add test that checks JIT is enabled for JavaScriptCore and crashes if it isn't 2021-10-26 19:07:37 -07:00
Jarred Sumner
ff1417421b Simple integration test for bun run 2021-10-26 19:06:45 -07:00
Jarred Sumner
00fb04f309 Update Makefile 2021-10-26 18:47:43 -07:00
Jarred Sumner
a3cc17fbe6 Add a very simple integration test for bun create next & bun create react 2021-10-26 18:46:53 -07:00
Jarred Sumner
fd27d403d7 Aligning text in terminals is tedious 2021-10-26 17:48:33 -07:00
Jarred Sumner
6d29e314a0 Switch to using optionalDependencies for installing the packages instead of a postinstall script.
Sorry yarn v1 users :(
2021-10-26 17:37:44 -07:00
Jarred Sumner
902e438e6c Update README.md 2021-10-26 17:26:45 -07:00
Jarred Sumner
096d668d83 Bun is now a task runner as well 2021-10-26 17:24:09 -07:00
Jarred Sumner
49c5c32714 More spacing 2021-10-26 17:19:45 -07:00
Jarred Sumner
9a70d8daa2 Update README.md 2021-10-26 17:15:35 -07:00
Jarred Sumner
79eb8d9d78 Update README.md 2021-10-26 17:12:22 -07:00
Jarred Sumner
221b280856 spacing 2021-10-26 17:09:38 -07:00
Jarred Sumner
933378ab80 Update README.md 2021-10-26 16:58:57 -07:00
Jarred Sumner
bd51d194cf Add .cjs, .mts, and .cts to the list of file extensions to check for imports 2021-10-26 16:58:51 -07:00
Jarred Sumner
1e27c7d8ce [bun run] Add to README 2021-10-26 16:49:48 -07:00
Jarred Sumner
7d554ed175 [bun run] Fix passthrough behavior 2021-10-26 16:49:34 -07:00
Jarred Sumner
e72d765961 Spacing 2021-10-26 16:18:57 -07:00
Jarred Sumner
9ce38b86ab Fix edgecase with options loading attempting to create an output directory when it shouldn't 2021-10-26 16:18:51 -07:00
Jarred Sumner
05bdda7b07 Update clap.zig 2021-10-26 15:47:14 -07:00
Jarred Sumner
d68f18c77a [bun run] Add node_modules/.bin in reverse order, print failure on exit code 2021-10-26 15:47:08 -07:00
Jarred Sumner
ae6349e573 [.env loader] Fix bug with PATH values sometimes getting cut off at the end by one character 2021-10-26 15:30:26 -07:00
Jarred Sumner
0541cff2ad New subcommand: bun run run scripts in package.json really fast 2021-10-26 05:18:21 -07:00
47 changed files with 2260 additions and 875 deletions

6
.gitignore vendored
View File

@@ -64,7 +64,11 @@ release/
sign.*.json
packages/debug-*
packages/bun-cli/postinstall.js
packages/bun-*/bin/*
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-*/*.o
packages/bun-cli/postinstall.js
packages/bun-cli/bin/*
bun-test-scratch

2
.gitmodules vendored
View File

@@ -24,4 +24,4 @@
[submodule "src/deps/s2n-tls"]
path = src/deps/s2n-tls
url = https://github.com/Jarred-Sumner/s2n-tls
ignore = dirty
ignore = dirty

96
.vscode/launch.json vendored
View File

@@ -1,15 +1,6 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "tgz debug",
"program": "${workspaceFolder}/tgz",
"args": ["./bun-examples-all-0.0.1634264480817.tgz"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -23,16 +14,25 @@
"type": "lldb",
"request": "launch",
"name": "bun create debug",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["create", "hanford/trends", "foo"],
"cwd": "/tmp/",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run debug",
"program": "bun-debug",
"args": ["paoskdpoasdk"],
"cwd": "/tmp/",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun a debug",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["create", "https://github.com/ahfarmer/calculator", "--force"],
"cwd": "/tmp/",
"console": "internalConsole"
@@ -41,7 +41,7 @@
"type": "lldb",
"request": "launch",
"name": "bun routes",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [],
"cwd": "${workspaceFolder}/integration/apps/routing",
"console": "internalConsole"
@@ -64,7 +64,7 @@
"type": "lldb",
"request": "launch",
"name": ".bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"./examples/hello-next/node_modules.server.bun"
// "--origin=https://localhost:9000/"
@@ -76,7 +76,7 @@
"type": "lldb",
"request": "launch",
"name": "Discord",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"discord"
// "--origin=https://localhost:9000/"
@@ -101,7 +101,7 @@
"type": "lldb",
"request": "launch",
"name": "Dev Launch",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"./simple.css",
"--resolve=dev",
@@ -135,7 +135,7 @@
"type": "lldb",
"request": "launch",
"name": "Demo Serve",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
// "args": ["--serve", "--origin=http://localhost:3000"],
"args": ["dev", "--origin=http://localhost:3000"],
"cwd": "${workspaceFolder}/examples/hello-next",
@@ -155,7 +155,7 @@
"type": "lldb",
"request": "launch",
"name": "Demo Lazy Build",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"./src/index.tsx",
"--resolve=lazy",
@@ -169,7 +169,7 @@
"type": "lldb",
"request": "launch",
"name": "Demo Build",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"./src/index.tsx",
"--resolve=dev",
@@ -184,7 +184,7 @@
"type": "lldb",
"request": "launch",
"name": "Dazzle serve",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["--origin=http://localhost:5000", "--disable-hmr"],
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
"console": "internalConsole"
@@ -193,7 +193,7 @@
"type": "lldb",
"request": "launch",
"name": "Bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["bun", "--use=next"],
"cwd": "/Users/jarred/Build/lattice/apps/dazzle",
"console": "internalConsole"
@@ -203,7 +203,7 @@
"request": "launch",
"name": "unicode check",
"args": ["--disable-hmr"],
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"cwd": "/Users/jarred/Build/app994",
"console": "internalConsole"
},
@@ -212,7 +212,7 @@
"type": "lldb",
"request": "launch",
"name": "Demo .bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["bun", "--use=bun-framework-next"],
"cwd": "${workspaceFolder}/examples/hello-next",
"console": "internalConsole"
@@ -221,7 +221,7 @@
"type": "lldb",
"request": "launch",
"name": "PNPM .bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["bun", "./pages/index.js"],
"cwd": "/Users/jarred/Build/pnpm-bun/packages/app",
"console": "internalConsole"
@@ -231,7 +231,7 @@
"type": "lldb",
"request": "launch",
"name": "PNPM serve",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [],
"cwd": "/Users/jarred/Build/pnpm-bun/packages/app",
"console": "internalConsole"
@@ -240,7 +240,7 @@
"type": "lldb",
"request": "launch",
"name": "Mixed case resolve",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"build",
"./index.js",
@@ -255,7 +255,7 @@
"type": "lldb",
"request": "launch",
"name": "Build .bun lotta-modules",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["bun", "./index.js", "--platform=browser"],
"cwd": "${workspaceFolder}/examples/lotta-modules/",
"console": "internalConsole"
@@ -264,7 +264,7 @@
"type": "lldb",
"request": "launch",
"name": "Dev CRA",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["dev", "--platform=browser"],
"cwd": "${workspaceFolder}/examples/hello-create-react-app/",
"console": "internalConsole"
@@ -273,7 +273,7 @@
"type": "lldb",
"request": "launch",
"name": "Fragment",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [],
"cwd": "${workspaceFolder}/src/test/fixtures",
"console": "internalConsole"
@@ -282,7 +282,7 @@
"type": "lldb",
"request": "launch",
"name": "Context Bun Bug",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["bun", "./code.js"],
"cwd": "/Users/jarred/Build/context/www",
"console": "internalConsole"
@@ -291,7 +291,7 @@
"type": "lldb",
"request": "launch",
"name": "Context Bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["bun", "--use=next"],
"cwd": "/Users/jarred/Build/context/www",
"console": "internalConsole"
@@ -300,7 +300,7 @@
"type": "lldb",
"request": "launch",
"name": "Bun-hello",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [],
"cwd": "${workspaceFolder}/packages/bun-hello",
"console": "internalConsole"
@@ -309,7 +309,7 @@
"type": "lldb",
"request": "launch",
"name": "Integration Test Dev",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [],
"cwd": "${workspaceFolder}/integration/snippets",
"console": "internalConsole"
@@ -318,7 +318,7 @@
"type": "lldb",
"request": "launch",
"name": "Integration Test Dev (no hmr)",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["--disable-hmr"],
"cwd": "${workspaceFolder}/integration/snippets",
"console": "internalConsole"
@@ -346,7 +346,7 @@
"request": "launch",
"name": "Context Dev",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [],
"cwd": "/Users/jarred/Build/context/www",
"console": "internalConsole"
@@ -365,7 +365,7 @@
"type": "lldb",
"request": "launch",
"name": "Debug Dev",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["dev"],
"cwd": "${workspaceFolder}/examples/hello-next",
"console": "internalConsole"
@@ -374,7 +374,7 @@
"type": "lldb",
"request": "launch",
"name": "Type-only import",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"bun",
"./src/test/fixtures/type-only-import.ts",
@@ -387,7 +387,7 @@
"type": "lldb",
"request": "launch",
"name": "Dev lotta-modules",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["dev", "./index.js", "--platform=browser"],
"cwd": "${workspaceFolder}/examples/lotta-modules/",
"console": "internalConsole"
@@ -396,7 +396,7 @@
"type": "lldb",
"request": "launch",
"name": "Demo Build .bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": [
"bun",
// "./index.js",
@@ -411,7 +411,7 @@
"type": "lldb",
"request": "launch",
"name": "PNPM Resolve symlink",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["--resolve=dev", "test-pnpm.js", "--platform=browser"],
"cwd": "${workspaceFolder}/examples/css-stress-test",
"console": "internalConsole"
@@ -420,7 +420,7 @@
"type": "lldb",
"request": "launch",
"name": "Demo Print .bun",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "bun-debug",
"args": ["./node_modules.bun"],
"cwd": "${workspaceFolder}/examples/simple-react",
"console": "internalConsole"
@@ -449,7 +449,7 @@
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch (other)",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
// "args": ["./simple.jsx", "--resolve=disable"],
// "cwd": "${workspaceFolder}/src/test/fixtures",
// "console": "internalConsole"
@@ -458,7 +458,7 @@
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
// "preLaunchTask": "build",
// "args": [
// "--resolve=disable",
@@ -473,7 +473,7 @@
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
// "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
@@ -490,7 +490,7 @@
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
// "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
@@ -507,7 +507,7 @@
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
// // "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
@@ -526,7 +526,7 @@
"type": "lldb",
"request": "launch",
"name": "Rome",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
"program": "${workspaceFolder}/build/macos-x86_64/bun",
// "preLaunchTask": "build",
"args": [
@@ -548,8 +548,8 @@
"type": "lldb",
"request": "launch",
"name": "Rome Dev",
// "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
"program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
// "program": "bun-debug",
"program": "bun-debug",
// "preLaunchTask": "build",
"args": [
"--resolve=dev",

230
Makefile
View File

@@ -1,5 +1,8 @@
SHELL := /bin/bash # Use bash syntax to be consistent
OS_NAME := $(shell uname -s | tr '[:upper:]' '[:lower:]')
ARCH_NAME_RAW := $(shell uname -m)
BUN_AUTO_UPDATER_REPO = Jarred-Sumner/bun-releases-for-updater
make-lazy = $(eval $1 = $$(eval $1 := $(value $(1)))$$($1))
@@ -13,19 +16,25 @@ else
endif
TRIPLET = $(OS_NAME)-$(ARCH_NAME)
PACKAGE_NAME = bun-cli-$(TRIPLET)
PACKAGE_NAME = bun-$(TRIPLET)
PACKAGES_REALPATH = $(realpath packages)
PACKAGE_DIR = $(PACKAGES_REALPATH)/$(PACKAGE_NAME)
DEBUG_PACKAGE_DIR = $(PACKAGES_REALPATH)/debug-$(PACKAGE_NAME)
BIN_DIR = $(PACKAGE_DIR)/bin
RELEASE_BUN = $(PACKAGE_DIR)/bin/bun
DEBUG_BIN = $(DEBUG_PACKAGE_DIR)/bin
RELEASE_BUN = $(PACKAGE_DIR)/bun
DEBUG_BIN = $(DEBUG_PACKAGE_DIR)/
DEBUG_BUN = $(DEBUG_BIN)/bun-debug
BUILD_ID = $(shell cat ./build-id)
PACKAGE_JSON_VERSION = 0.0.$(BUILD_ID)
BUN_BUILD_TAG = bun-v$(PACKAGE_JSON_VERSION)
CC ?= $(realpath clang)
CXX ?= $(realpath clang++)
BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
# We must use the same compiler version for the JavaScriptCore bindings and JavaScriptCore
# If we don't do this, strange memory allocation failures occur.
# This is easier to happen than you'd expect.
CC = $(shell which clang-12 || which clang)
CXX = $(shell which clang++-12 || which clang++)
DEPS_DIR = $(shell pwd)/src/deps
CPUS ?= $(shell nproc)
USER ?= $(echo $USER)
@@ -67,6 +76,7 @@ build-iconv-linux:
cd src/deps/libiconv/libiconv-1.16; ./configure --enable-static; make -j 12; cp ./lib/.libs/libiconv.a $(DEPS_DIR)/libiconv.a
BUN_TMP_DIR := /tmp/make-bun
BUN_DEPLOY_DIR = /tmp/bun-v$(PACKAGE_JSON_VERSION)/$(PACKAGE_NAME)
DEFAULT_USE_BMALLOC := 1
# ifeq ($(OS_NAME),linux)
@@ -266,20 +276,14 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = src/deps/mimalloc/libmimalloc.a \
src/deps/zlib/libz.a \
src/deps/libarchive.a \
src/deps/libs2n.a \
src/deps/picohttpparser.o
src/deps/picohttpparser.o \
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO) src/deps/libcrypto.a
BUN_LLD_FLAGS = $(OBJ_FILES) \
${ICU_FLAGS} \
${JSC_FILES} \
$(ARCHIVE_FILES) \
$(LIBICONV_PATH) \
$(CLANG_FLAGS)
PLATFORM_LINKER_FLAGS =
ifeq ($(OS_NAME), linux)
BUN_LLD_FLAGS += -lstdc++fs \
$(DEFAULT_LINKER_FLAGS) \
PLATFORM_LINKER_FLAGS = -lstdc++fs \
-lc \
-Wl,-z,now \
-Wl,--as-needed \
@@ -287,14 +291,25 @@ BUN_LLD_FLAGS += -lstdc++fs \
-Wl,-z,notext \
-ffunction-sections \
-fdata-sections \
-Wl,--gc-sections \
-fuse-ld=lld
-Wl,--gc-sections
endif
BUN_LLD_FLAGS = $(OBJ_FILES) \
${ICU_FLAGS} \
${JSC_FILES} \
$(ARCHIVE_FILES) \
$(LIBICONV_PATH) \
$(CLANG_FLAGS) \
$(DEFAULT_LINKER_FLAGS) \
$(PLATFORM_LINKER_FLAGS)
bun: vendor build-obj bun-link-lld-release
vendor-without-check: api analytics node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib openssl s2n libarchive
vendor-without-check: api analytics node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib openssl s2n bzip2 libarchive
libarchive:
cd src/deps/libarchive; \
@@ -306,10 +321,10 @@ libarchive:
cp ./.libs/libarchive.a $(DEPS_DIR)/libarchive.a;
tgz:
zig build-exe -Drelease-fast --main-pkg-path $(shell pwd) ./misctools/tgz.zig $(DEPS_DIR)/zlib/libz.a $(DEPS_DIR)/libarchive.a $(LIBICONV_PATH) -lc
zig build-exe -Drelease-fast --main-pkg-path $(shell pwd) ./misctools/tgz.zig $(DEPS_DIR)/zlib/libz.a $(DEPS_DIR)/libarchive.a $(LIBICONV_PATH) -lc
tgz-debug:
zig build-exe --main-pkg-path $(shell pwd) ./misctools/tgz.zig $(DEPS_DIR)/zlib/libz.a $(DEPS_DIR)/libarchive.a $(LIBICONV_PATH) -lc
zig build-exe --main-pkg-path $(shell pwd) ./misctools/tgz.zig $(DEPS_DIR)/zlib/libz.a $(DEPS_DIR)/libarchive.a $(LIBICONV_PATH) -lc
vendor: require init-submodules vendor-without-check
@@ -341,7 +356,7 @@ sign-macos-aarch64:
cls:
@echo "\n\n---\n\n"
release: all-js build-obj jsc-bindings-mac cls bun-link-lld-release
release: all-js build-obj jsc-bindings-mac cls bun-link-lld-release release-bin-entitlements
jsc-check:
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/javascript/jsc/WebKit -f $(shell pwd)/src/javascript/jsc/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@@ -350,8 +365,6 @@ jsc-check:
all-js: runtime_js fallback_decoder bun_error node-fallbacks
bin-dir:
@echo $(BIN_DIR)
api:
pnpm install; ./node_modules/.bin/peechy --schema src/api/schema.peechy --esm src/api/schema.js --ts src/api/schema.d.ts --zig src/api/schema.zig
@@ -371,6 +384,10 @@ runtime_js:
bun_error:
@cd packages/bun-error; pnpm install; npm run --silent build
generate-install-script:
@rm -f $(PACKAGES_REALPATH)/bun/install.js
@esbuild --log-level=error --define:BUN_VERSION="\"$(PACKAGE_JSON_VERSION)\"" --define:process.env.NODE_ENV="\"production\"" --platform=node --format=cjs $(PACKAGES_REALPATH)/bun/install.ts > $(PACKAGES_REALPATH)/bun/install.js
fetch:
cd misctools; zig build-obj -Drelease-fast ./fetch.zig -fcompiler-rt -lc --main-pkg-path ../
$(CXX) ./misctools/fetch.o -g -O3 -o ./misctools/fetch $(DEFAULT_LINKER_FLAGS) -lc \
@@ -435,53 +452,90 @@ jsc-build: $(JSC_BUILD_STEPS)
jsc-bindings: jsc-bindings-headers jsc-bindings-mac
jsc-bindings-headers:
rm -f /tmp/build-jsc-headers src/javascript/jsc/bindings/headers.zig
touch src/javascript/jsc/bindings/headers.zig
mkdir -p src/javascript/jsc/bindings-obj/
zig build headers
zig build headers-obj
$(CXX) $(PLATFORM_LINKER_FLAGS) -g $(DEBUG_BIN)/headers.o -W -o /tmp/build-jsc-headers $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES);
/tmp/build-jsc-headers
zig translate-c src/javascript/jsc/bindings/headers.h > src/javascript/jsc/bindings/headers.zig
zig run misctools/headers-cleaner.zig -lc
sed -i '/pub const int/d' src/javascript/jsc/bindings/headers.zig || echo "";
sed -i '/pub const uint/d' src/javascript/jsc/bindings/headers.zig || echo "";
sed -i '/pub const intmax/d' src/javascript/jsc/bindings/headers.zig || echo "";
sed -i '/pub const uintmax/d' src/javascript/jsc/bindings/headers.zig || echo "";
sed -i '/pub const max_align_t/{N;N;N;d;}' src/javascript/jsc/bindings/headers.zig
sed -i '/pub const ZigErrorCode/d' src/javascript/jsc/bindings/headers.zig
sed -i '/pub const JSClassRef/d' src/javascript/jsc/bindings/headers.zig
zig fmt src/javascript/jsc/bindings/headers.zig
bump:
expr $(BUILD_ID) + 1 > build-id
build_postinstall:
@esbuild --bundle --format=cjs --platform=node --define:BUN_VERSION="\"$(PACKAGE_JSON_VERSION)\"" packages/bun-cli/scripts/postinstall.ts > packages/bun-cli/postinstall.js
write-package-json-version-cli: build_postinstall
jq -S --raw-output '.version = "${PACKAGE_JSON_VERSION}"' packages/bun-cli/package.json > packages/bun-cli/package.json.new
mv packages/bun-cli/package.json.new packages/bun-cli/package.json
write-package-json-version:
jq -S --raw-output '.version = "${PACKAGE_JSON_VERSION}"' $(PACKAGE_DIR)/package.json > $(PACKAGE_DIR)/package.json.new
mv $(PACKAGE_DIR)/package.json.new $(PACKAGE_DIR)/package.json
tag:
git tag $(BUN_BUILD_TAG)
git push --tags
prepare-release: tag release-create write-package-json-version-cli write-package-json-version
prepare-release: tag release-create
release-create-auto-updater:
release-create:
gh release create --title "Bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)"
gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "Bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/Jarred-Sumner/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install Bun. Join Bun's Discord to get access https://bun.sh/discord"
BUN_DEPLOY_DIR := $(BUN_TMP_DIR)/bun-deploy
BUN_DEPLOY_CLI := $(BUN_TMP_DIR)/bun-cli
BUN_DEPLOY_PKG := $(BUN_DEPLOY_DIR)/$(PACKAGE_NAME)
release-bin-entitlements:
release-cli-push:
rm -rf $(BUN_DEPLOY_CLI)
mkdir -p $(BUN_DEPLOY_CLI)
cp -r packages/bun-cli $(BUN_DEPLOY_CLI)
cd $(BUN_DEPLOY_CLI)/bun-cli; npm pack;
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_CLI)//bun-cli/bun-cli-$(PACKAGE_JSON_VERSION).tgz
npm publish $(BUN_DEPLOY_CLI)/bun-cli/bun-cli-$(PACKAGE_JSON_VERSION).tgz --access=public
release-bin-generate-zip:
release-bin-push: write-package-json-version
rm -rf $(BUN_DEPLOY_DIR)
mkdir -p $(BUN_DEPLOY_DIR)
cp -r $(PACKAGE_DIR) $(BUN_DEPLOY_DIR)
cd $(BUN_DEPLOY_PKG); npm pack;
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_PKG)/$(PACKAGE_NAME)-$(PACKAGE_JSON_VERSION).tgz
npm publish $(BUN_DEPLOY_PKG)/$(PACKAGE_NAME)-$(PACKAGE_JSON_VERSION).tgz --access=public
ifeq ($(OS_NAME),darwin)
# Without this, JIT will fail on aarch64
# strip will remove the entitlements.plist
# which, in turn, will break JIT
release-bin-entitlements:
codesign --entitlements $(realpath entitlements.plist) --options runtime --force --timestamp --sign "$(CODESIGN_IDENTITY)" -vvvv --deep --strict $(BUN_RELEASE_BIN)
# macOS expects a specific directory structure for the zip file
# ditto lets us generate it similarly to right clicking "Compress" in Finder
release-bin-generate-zip:
dot_clean -vnm /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET)
cd /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET) && \
codesign --entitlements $(realpath entitlements.plist) --options runtime --force --timestamp --sign "$(CODESIGN_IDENTITY)" -vvvv --deep --strict bun
ditto -ck --rsrc --sequesterRsrc --keepParent /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET) $(BUN_DEPLOY_ZIP)
else
release-bin-generate-zip:
cd /tmp/bun-$(PACKAGE_JSON_VERSION)/ && zip -r bun-$(TRIPLET)
endif
BUN_DEPLOY_ZIP = /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET).zip
release-bin-generate-copy:
rm -rf /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET) $(BUN_DEPLOY_ZIP)
mkdir -p /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET)
cp $(BUN_RELEASE_BIN) /tmp/bun-$(PACKAGE_JSON_VERSION)/bun-$(TRIPLET)/bun
release-bin-generate: release-bin-generate-copy release-bin-generate-zip
release-bin-codesign:
xcrun notarytool submit --wait $(BUN_DEPLOY_ZIP) --keychain-profile "bun"
release-bin-check:
test $(shell eval $(BUN_RELEASE_BIN) --version) = $(PACKAGE_JSON_VERSION)
release-bin-without-push: test-all release-bin-check release-bin-generate release-bin-codesign
release-bin: release-bin-without-push release-bin-push
release-bin-push:
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_ZIP)
gh release upload $(BUN_BUILD_TAG) --clobber $(BUN_DEPLOY_ZIP) --repo $(BUN_AUTO_UPDATER_REPO)
dev-obj:
zig build obj
@@ -497,13 +551,29 @@ mkdir-dev:
test-install:
cd integration/scripts && pnpm install
test-all: test-install test-with-hmr test-no-hmr
test-all: test-install test-with-hmr test-no-hmr test-create-next test-create-react test-bun-run
test-all-mac: test-install test-with-hmr-mac test-no-hmr-mac test-create-next-mac test-create-react-mac test-bun-run-mac
copy-test-node-modules:
rm -rf integration/snippets/package-json-exports/node_modules || echo "";
cp -r integration/snippets/package-json-exports/_node_modules_copy integration/snippets/package-json-exports/node_modules || echo "";
kill-bun:
-killall -9 bun bun-debug
test-dev-create-next:
BUN_BIN=$(DEBUG_BUN) bash integration/apps/bun-create-next.sh
test-dev-create-react:
BUN_BIN=$(DEBUG_BUN) bash integration/apps/bun-create-react.sh
test-create-next:
BUN_BIN=$(RELEASE_BUN) bash integration/apps/bun-create-next.sh
test-bun-run:
cd integration/apps && BUN_BIN=$(RELEASE_BUN) bash ./bun-run-check.sh
test-create-react:
BUN_BIN=$(RELEASE_BUN) bash integration/apps/bun-create-react.sh
test-with-hmr: kill-bun copy-test-node-modules
BUN_BIN=$(RELEASE_BUN) node integration/scripts/browser.js
@@ -512,6 +582,22 @@ test-no-hmr: kill-bun copy-test-node-modules
-killall bun -9;
DISABLE_HMR="DISABLE_HMR" BUN_BIN=$(RELEASE_BUN) node integration/scripts/browser.js
test-create-next-mac:
BUN_BIN=$(MAC_BUN) bash integration/apps/bun-create-next.sh
test-bun-run-mac:
cd integration/apps && BUN_BIN=$(MAC_BUN) bash ./bun-run-check.sh
test-create-react-mac:
BUN_BIN=$(MAC_BUN) bash integration/apps/bun-create-react.sh
test-with-hmr-mac: kill-bun copy-test-node-modules
BUN_BIN=$(MAC_BUN) node integration/scripts/browser.js
test-no-hmr-mac: kill-bun copy-test-node-modules
-killall bun -9;
DISABLE_HMR="DISABLE_HMR" BUN_BIN=$(MAC_BUN) node integration/scripts/browser.js
test-dev-with-hmr: copy-test-node-modules
-killall bun-debug -9;
BUN_BIN=$(DEBUG_BUN) node integration/scripts/browser.js
@@ -520,7 +606,10 @@ test-dev-no-hmr: copy-test-node-modules
-killall bun-debug -9;
DISABLE_HMR="DISABLE_HMR" BUN_BIN=$(DEBUG_BUN) node integration/scripts/browser.js
test-dev-all: test-dev-with-hmr test-dev-no-hmr
test-dev-bun-run:
cd integration/apps && BUN_BIN=$(DEBUG_BUN) bash bun-run-check.sh
test-dev-all: test-dev-with-hmr test-dev-no-hmr test-dev-create-next test-dev-create-react test-dev-bun-run
test-dev: test-dev-with-hmr
@@ -528,7 +617,7 @@ jsc-copy-headers:
find src/javascript/jsc/WebKit/WebKitBuild/Release/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} src/javascript/jsc/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
jsc-build-mac-compile:
cd src/javascript/jsc/WebKit && ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON -DCMAKE_BUILD_TYPE=relwithdebinfo"
cd src/javascript/jsc/WebKit && ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON -DCMAKE_BUILD_TYPE=relwithdebinfo -DPTHREAD_JIT_PERMISSIONS_API=1"
jsc-build-linux-compile:
cd src/javascript/jsc/WebKit && ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON -DCMAKE_BUILD_TYPE=relwithdebinfo -DUSE_THIN_ARCHIVES=OFF"
@@ -553,8 +642,6 @@ clean: clean-bindings
(cd src/deps/picohttp && make clean) || echo "";
(cd src/deps/zlib && make clean) || echo "";
jsc-bindings-mac: $(OBJ_FILES)
@@ -568,26 +655,23 @@ bun-link-lld-debug:
-W \
-o $(DEBUG_BIN)/bun-debug \
bun-relink-copy:
cp /tmp/bun-$(PACKAGE_JSON_VERSION).o $(BUN_RELEASE_BIN).o
bun-relink: bun-relink-copy bun-link-lld-release
bun-link-lld-release:
$(CXX) $(BUN_LLD_FLAGS) \
$(BIN_DIR)/bun.o \
-o $(BIN_DIR)/bun \
$(BUN_RELEASE_BIN).o \
-o $(BUN_RELEASE_BIN) \
-W \
-flto \
-ftls-model=initial-exec \
-O3
cp $(BIN_DIR)/bun $(BIN_DIR)/bun-profile
$(STRIP) $(BIN_DIR)/bun
rm $(BIN_DIR)/bun.o
bun-link-lld-release-aarch64:
$(CXX) $(BUN_LLD_FLAGS) \
build/macos-aarch64/bun.o \
-o build/macos-aarch64/bun \
-Wl,-dead_strip \
-ftls-model=initial-exec \
-flto \
-O3
cp $(BUN_RELEASE_BIN) $(BUN_RELEASE_BIN)-profile
$(STRIP) $(BUN_RELEASE_BIN)
mv $(BUN_RELEASE_BIN).o /tmp/bun-$(PACKAGE_JSON_VERSION).o
# We do this outside of build.zig for performance reasons
# The C compilation stuff with build.zig is really slow and we don't need to run this as often as the rest
@@ -722,4 +806,4 @@ run-unit:
test: build-unit run-unit
integration-test-dev:
USE_EXISTING_PROCESS=true node integration/scripts/browser.js
USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node integration/scripts/browser.js

127
README.md
View File

@@ -6,6 +6,7 @@ Bun is a new:
- JavaScript & CSS bundler
- Development server with 60fps Hot Module Reloading (& WIP support for React Fast Refresh)
- JavaScript Runtime Environment (powered by JavaScriptCore, what WebKit/Safari uses)
- Task runner for package.json scripts
All in one fast & easy-to-use tool. Instead of 1,000 node_modules for development, you only need Bun.
@@ -14,8 +15,7 @@ All in one fast & easy-to-use tool. Instead of 1,000 node_modules for develo
## Install:
```
# Global install is recommended so bun appears in your $PATH
npm install -g bun-cli
curl -fsSL bun.sh/install | bash
```
## Benchmarks
@@ -24,7 +24,30 @@ npm install -g bun-cli
**JavaScript**: TODO
### Getting started
## Using Bun as a task runner
Instead of waiting 170ms for your npm client to start for each task, you wait 6ms for Bun.
To use bun as a task runner, run `bun run` instead of `npm run`.
```bash
# Instead of "npm run clean"
bun run clean
# This also works
bun clean
```
Assuming a package.json with a `"clean"` command in `"scripts"`:
```json
{
"name": "myapp",
"scripts": {
"clean": "rm -rf dist out node_modules"
}
}
```
## Using Bun with Next.js
@@ -154,7 +177,6 @@ Bun is a project with incredibly large scope, and it's early days.
| Sharing `.bun` files | Bun |
| [Finish fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) | Bun.js |
| [setTimeout](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) | Bun.js |
| `bun run` command | Bun.js |
<sup>JS Transpiler == JavaScript Transpiler</sup><br/>
<sup>TS Transpiler == TypeScript Transpiler</sup><br/>
@@ -188,6 +210,9 @@ Currently, Bun implements the following loaders:
| .ts | TypeScript + JavaScript | .js |
| .tsx | TypeScript + JSX + JavaScript | .js |
| .mjs | JavaScript | .js |
| .cjs | JavaScript | .js |
| .mts | TypeScript | .js |
| .cts | TypeScript | .js |
| .css | CSS | .css |
| .env | Env | N/A |
| .\* | file | string |
@@ -432,12 +457,88 @@ When running bun on an M1 (or Apple Silicon), if you see a message like this:
> [1] 28447 killed bun create next ./test
It most likely means you're running bun's x64 version on Apple Silicon. This happens if `node` (or, rather, `npm`) is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which Bun indirectly uses.
It most likely means you're running bun's x64 version on Apple Silicon. This happens if bun is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which Bun indirectly uses.
The fix is to ensure you installed a version of Node built for Apple Silicon and then reinstall `bun-cli`. You can also try to directly install `npm install -g bun-cli-darwin-aarch64`.
The fix is to ensure you installed a version of Bun built for Apple Silicon.
# Reference
### `bun run`
`bun run` is a fast `package.json` scripts runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for Bun.
By default, `bun run` prints the script that will be invoked:
```bash
bun run clean
$ rm -rf node_modules/.cache dist
```
You can disable that with `--silent`
```bash
bun run --silent clean
```
To print a list of `scripts`, `bun run` without additional args:
```bash
# This command
bun run
# Prints this
hello-create-react-app scripts:
bun run start
react-scripts start
bun run build
react-scripts build
bun run test
react-scripts test
bun run eject
react-scripts eject
4 scripts
```
`bun run` automatically loads environment variables from `.env` into the shell/task. `.env` files are loaded with the same priority as the rest of Bun, so that means:
1. `.env.local` is first
2. if (`$NODE_ENV` === `"production"`) `.env.production` else `.env.development`
3. `.env`
If something is unexpected there, you can run `bun run env` to get a list of environment variables.
The default shell it uses is `bash`, but if that's not found, it tries `sh` and if still not found, it tries `zsh`. This is not configurable right now, but if you care file an issue.
`bun run` automatically adds any parent `node_modules/.bin` to `$PATH` and if no scripts match, it will load that binary instead. That means you can run executables from packages too.
```bash
# If you use Relay
bun run relay-compiler
# You can also do this, but:
# - It will only lookup packages in `node_modules/.bin` instead of `$PATH`
# - It will start Bun's dev server if the script name doesn't exist (`bun` starts the dev server by default)
bun relay-compiler
```
To pass additional flags through to the task or executable, there are two ways:
```bash
# Explicit: include "--" and anything after will be added. This is the recommended way because it is more reliable.
bun run relay-compiler -- -help
# Implicit: if you do not include "--", anything *after* the script name will be passed through
# Bun flags are parsed first, which means e.g. `bun run relay-compiler --help` will print Bun's help instead of relay-compiler's help.
bun run relay-compiler --schema foo.graphql
```
`bun run` supports lifecycle hooks like `post${task}` and `pre{task}`. If they exist, they will run matching the behavior of npm clients. If the `pre${task}` fails, the next task will not be run. There is currently no flag to skip these lifecycle tasks if they exist, if you want that file an issue.
### `bun create`
`bun create` is a fast way to create a new project from a template.
@@ -814,7 +915,13 @@ In `bun`:
```bash
git submodule update --init --recursive --progress --depth=1
make vendor
make vendor dev
```
Verify it worked:
```bash
make test-dev-all
```
Note that `brew install zig` won't work. Bun uses a build of Zig with a couple patches.
@@ -856,6 +963,12 @@ Compile Bun:
make vendor dev
```
Verify it worked:
```bash
make test-dev-all
```
Run bun:
```bash

View File

@@ -1 +1 @@
37
40

250
build.zig
View File

@@ -34,7 +34,7 @@ pub fn build(b: *std.build.Builder) !void {
const cwd: []const u8 = b.pathFromRoot(".");
var exe: *std.build.LibExeObjStep = undefined;
var output_dir_buf = std.mem.zeroes([4096]u8);
var bin_label = if (mode == std.builtin.Mode.Debug) "packages/debug-bun-cli-" else "packages/bun-cli-";
var bin_label = if (mode == std.builtin.Mode.Debug) "packages/debug-bun-" else "packages/bun-";
var triplet_buf: [64]u8 = undefined;
var os_tagname = @tagName(target.getOs().tag);
@@ -60,7 +60,7 @@ pub fn build(b: *std.build.Builder) !void {
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}/bin", .{ bin_label, triplet });
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
const output_dir = b.pathFromRoot(output_dir_base);
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
@@ -133,16 +133,21 @@ pub fn build(b: *std.build.Builder) !void {
0,
try runtime_out_file.readToEndAlloc(b.allocator, try runtime_out_file.getEndPos()),
);
const runtime_version_file = std.fs.cwd().openFile("src/runtime.version", .{ .write = true }) catch unreachable;
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch unreachable;
defer runtime_version_file.close();
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .read = true });
const fallback_hash = std.hash.Wyhash.hash(
0,
try fallback_out_file.readToEndAlloc(b.allocator, try fallback_out_file.getEndPos()),
);
const fallback_version_file = std.fs.cwd().openFile("src/fallback.version", .{ .write = true }) catch unreachable;
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch unreachable;
fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
defer fallback_version_file.close();
exe.setTarget(target);
@@ -202,97 +207,114 @@ pub fn build(b: *std.build.Builder) !void {
// if (target.getOsTag() == .macos) "-DUSE_FOUNDATION=1" else "",
// if (target.getOsTag() == .macos) "-DUSE_CF_RETAIN_PTR=1" else "",
// };
const headers_step = b.step("headers", "JSC headers");
var headers_exec: *std.build.LibExeObjStep = b.addExecutable("headers", "src/javascript/jsc/bindings/bindings-generator.zig");
var headers_runner = headers_exec.run();
headers_exec.setMainPkgPath(javascript.main_pkg_path.?);
headers_step.dependOn(&headers_runner.step);
var translate_c: *std.build.TranslateCStep = b.addTranslateC(.{ .path = b.pathFromRoot("src/javascript/jsc/bindings/headers.h") });
translate_c.out_basename = "headers";
translate_c.output_dir = b.pathFromRoot("src/javascript/jsc/bindings/");
headers_step.dependOn(&translate_c.step);
headers_zig_file = b.pathFromRoot("src/javascript/jsc/bindings/headers.zig");
const headers_step = b.step("headers-obj", "JSC headers Step #1");
var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/javascript/jsc/bindings/bindings-generator.zig");
headers_obj.setMainPkgPath(javascript.main_pkg_path.?);
headers_step.dependOn(&headers_obj.step);
original_make_fn = headers_step.makeFn;
headers_step.makeFn = HeadersMaker.make;
b.default_step.dependOn(&exe.step);
var steps = [_]*std.build.LibExeObjStep{ exe, javascript, typings_exe, headers_exec };
{
var steps = [_]*std.build.LibExeObjStep{ exe, javascript, typings_exe };
// const single_threaded = b.option(bool, "single-threaded", "Build single-threaded") orelse false;
// const single_threaded = b.option(bool, "single-threaded", "Build single-threaded") orelse false;
for (steps) |step, i| {
step.linkLibC();
step.linkLibCpp();
addPicoHTTP(
step,
true,
);
step.addObjectFile("src/deps/libJavaScriptCore.a");
step.addObjectFile("src/deps/libWTF.a");
step.addObjectFile("src/deps/libcrypto.a");
step.addObjectFile("src/deps/libbmalloc.a");
step.addObjectFile("src/deps/libarchive.a");
step.addObjectFile("src/deps/libs2n.a");
step.addObjectFile("src/deps/zlib/libz.a");
step.addObjectFile("src/deps/mimalloc/libmimalloc.a");
step.addLibPath("src/deps/mimalloc");
step.addIncludeDir("src/deps/mimalloc");
// step.single_threaded = single_threaded;
if (target.getOsTag() == .macos) {
const homebrew_prefix = comptime if (std.Target.current.cpu.arch == .aarch64)
"/opt/homebrew/"
else
"/usr/local/";
// We must link ICU statically
step.addObjectFile(homebrew_prefix ++ "opt/icu4c/lib/libicudata.a");
step.addObjectFile(homebrew_prefix ++ "opt/icu4c/lib/libicui18n.a");
step.addObjectFile(homebrew_prefix ++ "opt/icu4c/lib/libicuuc.a");
step.addObjectFile(homebrew_prefix ++ "opt/libiconv/lib/libiconv.a");
// icucore is a weird macOS only library
step.linkSystemLibrary("icucore");
step.addLibPath(homebrew_prefix ++ "opt/icu4c/lib");
step.addIncludeDir(homebrew_prefix ++ "opt/icu4c/include");
} else {
step.linkSystemLibrary("icuuc");
step.linkSystemLibrary("icudata");
step.linkSystemLibrary("icui18n");
step.linkSystemLibrary("iconv");
}
for (bindings_files.items) |binding| {
step.addObjectFile(
binding,
for (steps) |step, i| {
step.linkLibC();
step.linkLibCpp();
addPicoHTTP(
step,
true,
);
step.addObjectFile("src/deps/libJavaScriptCore.a");
step.addObjectFile("src/deps/libWTF.a");
step.addObjectFile("src/deps/libcrypto.a");
step.addObjectFile("src/deps/libbmalloc.a");
step.addObjectFile("src/deps/libarchive.a");
step.addObjectFile("src/deps/libs2n.a");
step.addObjectFile("src/deps/zlib/libz.a");
step.addObjectFile("src/deps/mimalloc/libmimalloc.a");
step.addLibPath("src/deps/mimalloc");
step.addIncludeDir("src/deps/mimalloc");
// step.single_threaded = single_threaded;
if (target.getOsTag() == .macos) {
const homebrew_prefix = comptime if (std.Target.current.cpu.arch == .aarch64)
"/opt/homebrew/"
else
"/usr/local/";
// We must link ICU statically
step.addObjectFile(homebrew_prefix ++ "opt/icu4c/lib/libicudata.a");
step.addObjectFile(homebrew_prefix ++ "opt/icu4c/lib/libicui18n.a");
step.addObjectFile(homebrew_prefix ++ "opt/icu4c/lib/libicuuc.a");
step.addObjectFile(homebrew_prefix ++ "opt/libiconv/lib/libiconv.a");
// icucore is a weird macOS only library
step.linkSystemLibrary("icucore");
step.addLibPath(homebrew_prefix ++ "opt/icu4c/lib");
step.addIncludeDir(homebrew_prefix ++ "opt/icu4c/include");
} else {
step.linkSystemLibrary("icuuc");
step.linkSystemLibrary("icudata");
step.linkSystemLibrary("icui18n");
step.addObjectFile("src/deps/libiconv.a");
}
for (bindings_files.items) |binding| {
step.addObjectFile(
binding,
);
}
}
}
var obj_step = b.step("obj", "Build Bun as a .o file");
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
obj.setTarget(target);
addPicoHTTP(obj, false);
obj.addPackage(.{
.name = "clap",
.path = .{ .path = "src/deps/zig-clap/clap.zig" },
});
{
var obj_step = b.step("obj", "Build Bun as a .o file");
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
obj.setTarget(target);
addPicoHTTP(obj, false);
obj.addPackage(.{
.name = "clap",
.path = .{ .path = "src/deps/zig-clap/clap.zig" },
});
obj_step.dependOn(&obj.step);
obj.setOutputDir(output_dir);
obj.setBuildMode(mode);
obj.linkLibC();
obj.linkLibCpp();
obj.strip = false;
obj.bundle_compiler_rt = true;
obj_step.dependOn(&obj.step);
obj.setOutputDir(output_dir);
obj.setBuildMode(mode);
obj.linkLibC();
obj.linkLibCpp();
if (target.getOsTag() == .linux) {
// obj.want_lto = tar;
obj.link_emit_relocs = true;
obj.link_function_sections = true;
obj.strip = false;
obj.bundle_compiler_rt = true;
if (target.getOsTag() == .linux) {
// obj.want_lto = tar;
obj.link_emit_relocs = true;
obj.link_function_sections = true;
}
}
{
headers_obj.setTarget(target);
headers_obj.addPackage(.{
.name = "clap",
.path = .{ .path = "src/deps/zig-clap/clap.zig" },
});
headers_obj.setOutputDir(output_dir);
headers_obj.setBuildMode(mode);
headers_obj.linkLibC();
headers_obj.linkLibCpp();
headers_obj.bundle_compiler_rt = true;
if (target.getOsTag() == .linux) {
// obj.want_lto = tar;
headers_obj.link_emit_relocs = true;
headers_obj.link_function_sections = true;
}
}
} else {
b.default_step.dependOn(&exe.step);
@@ -334,59 +356,3 @@ pub fn build(b: *std.build.Builder) !void {
}
pub var original_make_fn: ?fn (step: *std.build.Step) anyerror!void = null;
pub var headers_zig_file: ?[]const u8 = null;
const HeadersMaker = struct {
pub fn make(self: *std.build.Step) anyerror!void {
try original_make_fn.?(self);
var headers_zig: std.fs.File = try std.fs.openFileAbsolute(headers_zig_file.?, .{ .write = true });
var contents = try headers_zig.readToEndAlloc(std.heap.page_allocator, headers_zig.getEndPos() catch unreachable);
const last_extern_i = std.mem.lastIndexOf(u8, contents, "pub extern fn") orelse @panic("Expected contents");
const last_newline = std.mem.indexOf(u8, contents[last_extern_i..], "\n") orelse @panic("Expected newline");
const to_splice = "usingnamespace @import(\"./headers-replacements.zig\");\n";
var new_contents = try std.heap.page_allocator.alloc(u8, contents.len + to_splice.len);
std.mem.copy(u8, new_contents, to_splice);
std.mem.copy(u8, new_contents[to_splice.len..], contents);
var i: usize = to_splice.len;
var remainder = new_contents[i..];
while (remainder.len > 0) {
i = std.mem.indexOf(u8, remainder, "\npub const struct_b") orelse break + "\npub const struct_b".len;
var begin = remainder[i..];
const end_line = std.mem.indexOf(u8, begin, "extern struct {") orelse break;
const end_struct = std.mem.indexOf(u8, begin, "\n};\n") orelse break + "\n};\n".len;
std.mem.set(u8, begin[1 .. end_struct + 3], ' ');
remainder = begin[end_struct..];
}
i = to_splice.len;
remainder = new_contents[i..];
while (remainder.len > 0) {
i = std.mem.indexOf(u8, remainder, "\npub const struct_") orelse break + "\npub const struct_".len;
var begin = remainder[i..];
var end_struct = std.mem.indexOf(u8, begin, "opaque {};") orelse break;
end_struct += std.mem.indexOf(u8, begin[end_struct..], "\n") orelse break;
i = 0;
std.mem.set(u8, begin[1..end_struct], ' ');
remainder = begin[end_struct..];
}
const HARDCODE = [_][]const u8{
"[*c][*c]JSC__Exception",
"*?*JSC__Exception ",
"[*c]?*c_void",
"[*c]*c_void",
};
i = 0;
while (i < HARDCODE.len) : (i += 2) {
_ = std.mem.replace(u8, new_contents, HARDCODE[i], HARDCODE[i + 1], new_contents);
}
const js_value_start = std.mem.indexOf(u8, new_contents, "pub const JSC__JSValue") orelse unreachable;
const js_value_end = std.mem.indexOf(u8, new_contents[js_value_start..], "\n") orelse unreachable;
std.mem.set(u8, new_contents[js_value_start..][0..js_value_end], ' ');
try headers_zig.seekTo(0);
try headers_zig.writeAll(new_contents);
try headers_zig.setEndPos(last_newline + last_extern_i + to_splice.len);
}
};

View File

@@ -2,13 +2,15 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
</dict>
</plist>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@@ -1,6 +1,6 @@
{
"name": "@bun-examples/next",
"version": "0.0.35",
"version": "0.0.37",
"main": "index.js",
"dependencies": {
"next": "11.1.2",

View File

@@ -1,6 +1,6 @@
{
"name": "@bun-examples/react",
"version": "0.0.31",
"version": "0.0.32",
"dependencies": {
"@testing-library/jest-dom": "^5.11.4",
"@testing-library/react": "^11.1.0",

View File

@@ -0,0 +1,16 @@
#!/bin/bash
# The important part of this test: make sure that Bun.js successfully loads
# The most likely reason for this test to fail is that something broke in the JavaScriptCore <> Bun integration
killall -9 $(basename $BUN_BIN) || echo "";
rm -rf /tmp/next-app;
mkdir -p /tmp/next-app;
$BUN_BIN create next /tmp/next-app;
cd /tmp/next-app;
BUN_CRASH_WITHOUT_JIT=1 $BUN_BIN --port 8087 &
sleep 0.005
curl --fail http://localhost:8087/ && killall -9 $(basename $BUN_BIN) && echo "✅ bun create next passed."
exit $?

View File

@@ -0,0 +1,14 @@
#!/bin/bash
killall -9 $(basename $BUN_BIN) || echo "";
rm -rf /tmp/react-app;
mkdir -p /tmp/react-app;
$BUN_BIN create react /tmp/react-app;
cd /tmp/react-app;
BUN_CRASH_WITHOUT_JIT=1 $BUN_BIN --port 8087 &
sleep 0.005
curl --fail http://localhost:8087/ && killall -9 $(basename $BUN_BIN) && echo "✅ bun create react passed."
exit $?

View File

@@ -0,0 +1,6 @@
{
"name": "check",
"scripts": {
"this-should-work": "echo \"✅ bun run test passed!\""
}
}

View File

@@ -0,0 +1,15 @@
#!/bin/bash
killall -9 $(basename $BUN_BIN) || echo "";
rm -rf /tmp/bun-run-check
mkdir -p /tmp/bun-run-check
cp ./bun-run-check-package.json /tmp/bun-run-check/package.json
cd /tmp/bun-run-check
bun run --silent this-should-work
exit $?

View File

@@ -15,6 +15,7 @@ const bunFlags = [
const bunExec = process.env.BUN_BIN || "bun";
var bunProcess;
var waitSpawn;
if (!USE_EXISTING_PROCESS) {
bunProcess = child_process.spawn(bunExec, bunFlags, {
cwd: snippetsDir,
@@ -29,10 +30,17 @@ if (!USE_EXISTING_PROCESS) {
console.log("$", bunExec, bunFlags.join(" "));
bunProcess.stderr.pipe(process.stderr);
bunProcess.stdout.pipe(process.stdout);
var rejecter;
bunProcess.once("error", (err) => {
console.error("❌ bun error", err);
process.exit(1);
});
waitSpawn = new Promise((resolve, reject) => {
bunProcess.once("spawn", (code) => {
console.log("Spawned");
resolve();
});
});
process.on("beforeExit", () => {
bunProcess?.kill(0);
});
@@ -69,6 +77,9 @@ async function main() {
const promises = [];
let allTestsPassed = true;
if (waitSpawn) await waitSpawn;
var canRetry = true;
async function runPage(key) {
var page;
try {
@@ -90,13 +101,25 @@ async function main() {
let testDone = new Promise((resolve) => {
page.exposeFunction("testDone", resolve);
});
await page.goto(`${serverURL}/`, {
waitUntil: "domcontentloaded",
});
await page.evaluate(`
try {
await page.goto(`${serverURL}/`, {
waitUntil: "domcontentloaded",
});
await page.evaluate(`
globalThis.runTest("${key}");
`);
await testDone;
await testDone;
} catch (err) {
if (canRetry) {
console.log(
`${key} failed once (incase it's still booting on universal binary for the first time). Retrying...`
);
canRetry = false;
return await runPage(key);
}
throw err;
}
console.log(`${key}`);
} catch (e) {
@@ -113,7 +136,7 @@ async function main() {
console.warn(`Failed to update snapshot: ${key}`, exception);
}
}
canRetry = false;
if (shouldClose) await page.close();
}

View File

@@ -0,0 +1,58 @@
const std = @import("std");
pub fn main() anyerror!void {
const headers_zig_file_src: std.builtin.SourceLocation = @src();
var paths = [_][]const u8{std.mem.span(headers_zig_file_src.file), "../../src/javascript/jsc/bindings/headers.zig"};
const headers_zig_file = try std.fs.path.resolve(std.heap.c_allocator, &paths);
std.debug.print("Writing to {s}", .{headers_zig_file});
var headers_zig: std.fs.File = try std.fs.openFileAbsolute(headers_zig_file, .{ .write = true });
var contents = try headers_zig.readToEndAlloc(std.heap.page_allocator, headers_zig.getEndPos() catch unreachable);
const last_extern_i = std.mem.lastIndexOf(u8, contents, "pub extern fn") orelse @panic("Expected contents");
const last_newline = std.mem.indexOf(u8, contents[last_extern_i..], "\n") orelse @panic("Expected newline");
const to_splice = "usingnamespace @import(\"./headers-replacements.zig\");\n";
var new_contents = try std.heap.page_allocator.alloc(u8, contents.len + to_splice.len);
std.mem.copy(u8, new_contents, to_splice);
std.mem.copy(u8, new_contents[to_splice.len..], contents);
var i: usize = to_splice.len;
var remainder = new_contents[i..];
while (remainder.len > 0) {
i = std.mem.indexOf(u8, remainder, "\npub const struct_b") orelse break + "\npub const struct_b".len;
var begin = remainder[i..];
const end_line = std.mem.indexOf(u8, begin, "extern struct {") orelse break;
const end_struct = std.mem.indexOf(u8, begin, "\n};\n") orelse break + "\n};\n".len;
std.mem.set(u8, begin[1 .. end_struct + 3], ' ');
remainder = begin[end_struct..];
}
i = to_splice.len;
remainder = new_contents[i..];
while (remainder.len > 0) {
i = std.mem.indexOf(u8, remainder, "\npub const struct_") orelse break + "\npub const struct_".len;
var begin = remainder[i..];
var end_struct = std.mem.indexOf(u8, begin, "opaque {};") orelse break;
end_struct += std.mem.indexOf(u8, begin[end_struct..], "\n") orelse break;
i = 0;
std.mem.set(u8, begin[1..end_struct], ' ');
remainder = begin[end_struct..];
}
const HARDCODE = [_][]const u8{
"[*c][*c]JSC__Exception",
"*?*JSC__Exception ",
"[*c]?*c_void",
"[*c]*c_void",
};
i = 0;
while (i < HARDCODE.len) : (i += 2) {
_ = std.mem.replace(u8, new_contents, HARDCODE[i], HARDCODE[i + 1], new_contents);
}
const js_value_start = std.mem.indexOf(u8, new_contents, "pub const JSC__JSValue") orelse unreachable;
const js_value_end = std.mem.indexOf(u8, new_contents[js_value_start..], "\n") orelse unreachable;
std.mem.set(u8, new_contents[js_value_start..][0..js_value_end], ' ');
try headers_zig.seekTo(0);
try headers_zig.writeAll(new_contents);
try headers_zig.setEndPos(last_newline + last_extern_i + to_splice.len);
}

View File

@@ -74,7 +74,7 @@ pub fn main() anyerror!void {
tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
}
_ = try Archive.extractToDisk(
_ = try Archive.extractToDisk(
file_buf,
folder,
null,

View File

@@ -1,8 +0,0 @@
{
"directories": {
"bin": "bin"
},
"name": "bun-cli-darwin-aarch64",
"repository": "https://github.com/jarred-sumner/bun",
"version": "0.0.0-11"
}

View File

@@ -1,8 +0,0 @@
{
"directories": {
"bin": "bin"
},
"name": "bun-cli-darwin-x64",
"repository": "https://github.com/jarred-sumner/bun",
"version": "0.0.36"
}

View File

@@ -1,8 +0,0 @@
{
"directories": {
"bin": "bin"
},
"name": "bun-cli-linux-x64",
"repository": "https://github.com/jarred-sumner/bun",
"version": "0.0.28"
}

View File

@@ -1,2 +0,0 @@
scripts
reset-bin.js

View File

@@ -1,13 +0,0 @@
{
"bin": {
"bun": "bin/bun"
},
"license": "MIT",
"name": "bun-cli",
"repository": "https://github.com/jarred-sumner/bun",
"scripts": {
"postinstall": "node postinstall.js",
"prepublishOnly": "rm -rf ./bin/bun; chmod +x ./reset-bin.js; cp ./reset-bin.js ./bin/bun"
},
"version": "0.0.36"
}

View File

@@ -1,13 +0,0 @@
#!/usr/bin/env node
throw new Error(`bun-cli: Failed to install correctly
Make sure you don't have "ignore-scripts" set to true. You can check this with
"npm config get ignore-scripts". If that returns true you can reset it back to
false using "npm config set ignore-scripts false" and then reinstall bun.
If you're using npm v7, make sure your package-lock.json file contains either
"lockfileVersion": 1 or the code "hasInstallScript": true. If it doesn't have
either of those, then it is likely the case that a known bug in npm v7 has
corrupted your package-lock.json file. Regenerating your package-lock.json file
should fix this issue.
`);

View File

@@ -1,360 +0,0 @@
// This is almost verbatim esbuild's postinstall script.
// Thank you @evanw.
import fs = require("fs");
import os = require("os");
import path = require("path");
import zlib = require("zlib");
import https = require("https");
import child_process = require("child_process");
declare const BUN_VERSION: string;
const version = BUN_VERSION;
const binPath = path.join(__dirname, "bin", "bun");
async function installBinaryFromPackage(
name: string,
fromPath: string,
toPath: string
): Promise<void> {
// Try to install from the cache if possible
const cachePath = getCachePath(name);
try {
// Copy from the cache
fs.copyFileSync(cachePath, toPath);
fs.chmodSync(toPath, 0o755);
// Verify that the binary is the correct version
validateBinaryVersion(toPath);
// Mark the cache entry as used for LRU
const now = new Date();
fs.utimesSync(cachePath, now, now);
return;
} catch {}
// Next, try to install using npm. This should handle various tricky cases
// such as environments where requests to npmjs.org will hang (in which case
// there is probably a proxy and/or a custom registry configured instead).
let buffer: Buffer | undefined;
let didFail = false;
try {
buffer = installUsingNPM(name, fromPath);
} catch (err) {
didFail = true;
console.error(`Trying to install "${name}" using npm`);
console.error(
`Failed to install "${name}" using npm: ${(err && err.message) || err}`
);
}
// If that fails, the user could have npm configured incorrectly or could not
// have npm installed. Try downloading directly from npm as a last resort.
if (!buffer) {
const url = `https://registry.npmjs.org/${name}/-/${name}-${version}.tgz`;
console.error(`Trying to download ${JSON.stringify(url)}`);
try {
buffer = extractFileFromTarGzip(await fetch(url), fromPath);
} catch (err) {
console.error(
`Failed to download ${JSON.stringify(url)}: ${
(err && err.message) || err
}`
);
}
}
// Give up if none of that worked
if (!buffer) {
console.error(`Install unsuccessful`);
process.exit(1);
}
// Write out the binary executable that was extracted from the package
fs.writeFileSync(toPath, buffer, { mode: 0o755 });
// Verify that the binary is the correct version
try {
validateBinaryVersion(toPath);
} catch (err) {
console.error(
`The version of the downloaded binary is incorrect: ${
(err && err.message) || err
}`
);
console.error(`Install unsuccessful`);
process.exit(1);
}
// Also try to cache the file to speed up future installs
try {
fs.mkdirSync(path.dirname(cachePath), {
recursive: true,
mode: 0o700, // https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
});
fs.copyFileSync(toPath, cachePath);
cleanCacheLRU(cachePath);
} catch {}
if (didFail) console.error(`Install successful`);
}
function validateBinaryVersion(binaryPath: string): void {
const stdout = child_process
.execFileSync(binaryPath, ["--version"])
.toString()
.trim();
if (stdout !== version) {
throw new Error(
`Expected ${JSON.stringify(version)} but got ${JSON.stringify(stdout)}`
);
}
}
function getCachePath(name: string): string {
const home = os.homedir();
const common = ["bun", "bin", `${name}@${version}`];
if (process.platform === "darwin")
return path.join(home, "Library", "Caches", ...common);
if (process.platform === "win32")
return path.join(home, "AppData", "Local", "Cache", ...common);
// https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
const XDG_CACHE_HOME = process.env.XDG_CACHE_HOME;
if (
process.platform === "linux" &&
XDG_CACHE_HOME &&
path.isAbsolute(XDG_CACHE_HOME)
)
return path.join(XDG_CACHE_HOME, ...common);
return path.join(home, ".cache", ...common);
}
function cleanCacheLRU(fileToKeep: string): void {
// Gather all entries in the cache
const dir = path.dirname(fileToKeep);
const entries: { path: string; mtime: Date }[] = [];
for (const entry of fs.readdirSync(dir)) {
const entryPath = path.join(dir, entry);
try {
const stats = fs.statSync(entryPath);
entries.push({ path: entryPath, mtime: stats.mtime });
} catch {}
}
// Only keep the most recent entries
entries.sort((a, b) => +b.mtime - +a.mtime);
for (const entry of entries.slice(5)) {
try {
fs.unlinkSync(entry.path);
} catch {}
}
}
function fetch(url: string): Promise<Buffer> {
return new Promise((resolve, reject) => {
https
.get(url, (res) => {
if (
(res.statusCode === 301 || res.statusCode === 302) &&
res.headers.location
)
return fetch(res.headers.location).then(resolve, reject);
if (res.statusCode !== 200)
return reject(new Error(`Server responded with ${res.statusCode}`));
let chunks: Buffer[] = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => resolve(Buffer.concat(chunks)));
})
.on("error", reject);
});
}
function extractFileFromTarGzip(buffer: Buffer, file: string): Buffer {
try {
buffer = zlib.unzipSync(buffer);
} catch (err) {
throw new Error(
`Invalid gzip data in archive: ${(err && err.message) || err}`
);
}
let str = (i: number, n: number) =>
String.fromCharCode(...buffer.subarray(i, i + n)).replace(/\0.*$/, "");
let offset = 0;
file = `package/${file}`;
while (offset < buffer.length) {
let name = str(offset, 100);
let size = parseInt(str(offset + 124, 12), 8);
offset += 512;
if (!isNaN(size)) {
if (name === file) return buffer.subarray(offset, offset + size);
offset += (size + 511) & ~511;
}
}
throw new Error(`Could not find ${JSON.stringify(file)} in archive`);
}
function installUsingNPM(name: string, file: string): Buffer {
const installDir = path.join(
os.tmpdir(),
"bun-cli-" + Math.random().toString(36).slice(2)
);
fs.mkdirSync(installDir, { recursive: true });
fs.writeFileSync(path.join(installDir, "package.json"), "{}");
// Erase "npm_config_global" so that "npm install --global bun" works.
// Otherwise this nested "npm install" will also be global, and the install
// will deadlock waiting for the global installation lock.
const env = { ...process.env, npm_config_global: undefined };
child_process.execSync(
`npm install --loglevel=error --prefer-offline --no-audit --progress=false ${name}@${version}`,
{ cwd: installDir, stdio: "pipe", env }
);
const buffer = fs.readFileSync(
path.join(installDir, "node_modules", name, file)
);
try {
removeRecursive(installDir);
} catch (e) {
// Removing a file or directory can randomly break on Windows, returning
// EBUSY for an arbitrary length of time. I think this happens when some
// other program has that file or directory open (e.g. an anti-virus
// program). This is fine on Unix because the OS just unlinks the entry
// but keeps the reference around until it's unused. In this case we just
// ignore errors because this directory is in a temporary directory, so in
// theory it should get cleaned up eventually anyway.
}
return buffer;
}
function removeRecursive(dir: string): void {
for (const entry of fs.readdirSync(dir)) {
const entryPath = path.join(dir, entry);
let stats;
try {
stats = fs.lstatSync(entryPath);
} catch (e) {
continue; // Guard against https://github.com/nodejs/node/issues/4760
}
if (stats.isDirectory()) removeRecursive(entryPath);
else fs.unlinkSync(entryPath);
}
fs.rmdirSync(dir);
}
function isYarnBerryOrNewer(): boolean {
const { npm_config_user_agent } = process.env;
if (npm_config_user_agent) {
const match = npm_config_user_agent.match(/yarn\/(\d+)/);
if (match && match[1]) {
return parseInt(match[1], 10) >= 2;
}
}
return false;
}
function installDirectly(name: string) {
  // Fast path for normal package managers: place the real binary at binPath.
  const override = process.env.BUN_BINARY_PATH;
  if (override) {
    // A prebuilt binary was supplied (local/CI testing): copy it in and
    // check that it reports the expected version.
    fs.copyFileSync(override, binPath);
    validateBinaryVersion(binPath);
    return;
  }
  // Write to a temporary file, then move the file into place. This is an
  // attempt to avoid problems with package managers like pnpm which will
  // usually turn each file into a hard link. We don't want to mutate the
  // hard-linked file which may be shared with other files.
  const tempBinPath = binPath + "__";
  installBinaryFromPackage(name, "bin/bun", tempBinPath)
    .then(() => fs.renameSync(tempBinPath, binPath))
    .catch((e) =>
      setImmediate(() => {
        throw e;
      })
    );
}
function installWithWrapper(
  name: string,
  fromPath: string,
  toPath: string
): void {
  // Yarn berry refuses to run installed files as native binaries, so binPath
  // becomes a small Node shim that re-spawns the real executable, which is
  // stored at `toPath` relative to this script's parent directory.
  const wrapper = `#!/usr/bin/env node
const path = require('path');
const bun_exe = path.join(__dirname, '..', ${JSON.stringify(toPath)});
const child_process = require('child_process');
console.warn("[Bun] Yarn 2's lack of binary support slows Bun down. Consider using a different package manager until https://github.com/yarnpkg/berry/issues/882 is fixed.\n");
const { status } = child_process.spawnSync(bun_exe, process.argv.slice(2), { stdio: 'inherit' });
process.exitCode = status === null ? 1 : status;
`;
  fs.writeFileSync(binPath, wrapper);

  const absToPath = path.join(__dirname, toPath);
  const override = process.env.BUN_BINARY_PATH;
  if (override) {
    // Prebuilt binary supplied (local/CI testing): copy and sanity-check it.
    fs.copyFileSync(override, absToPath);
    validateBinaryVersion(absToPath);
  } else {
    installBinaryFromPackage(name, fromPath, absToPath).catch((e) =>
      setImmediate(() => {
        throw e;
      })
    );
  }
}
function installOnUnix(name: string): void {
  // Yarn 2 is deliberately incompatible with binary modules because the
  // developers of Yarn 2 don't think they should be used. See this thread
  // for details: https://github.com/yarnpkg/berry/issues/882.
  //
  // We don't want to slow bun down for everyone just because of that
  // decision, so the Node-based compatibility shim is installed only when
  // bun is being installed with Yarn 2 — every other package manager gets
  // the binary directly for maximum speed.
  if (isYarnBerryOrNewer()) {
    installWithWrapper(name, "bin/bun", "bun");
    return;
  }
  installDirectly(name);
}
function installOnWindows(name: string): void {
  // On Windows the install always goes through the Node wrapper script,
  // pointing it at the packaged bun.exe.
  installWithWrapper(name, "bun.exe", "bun.exe");
}
// Key identifying the current machine, matching the format used by the
// package tables below: "<platform> <arch> <endianness>".
const platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`;

// Platforms that ship bun.exe and always install through the wrapper.
// (None published yet — entries are kept for when they are.)
const knownWindowsPackages: Record<string, string> = {
  // "win32 arm64 LE": "bun-cli-windows-arm64",
  // "win32 ia32 LE": "bun-cli-windows-32",
  // "win32 x64 LE": "bun-cli-windows-64",
};

// Platforms that ship a native "bin/bun" executable.
const knownUnixlikePackages: Record<string, string> = {
  // "android arm64 LE": "bun-cli-android-arm64",
  "darwin arm64 LE": "bun-cli-darwin-aarch64",
  "darwin x64 LE": "bun-cli-darwin-x64",
  "linux x64 LE": "bun-cli-linux-x64",
  // "freebsd arm64 LE": "bun-cli-freebsd-arm64",
  // "freebsd x64 LE": "bun-cli-freebsd-64",
  // "openbsd x64 LE": "bun-cli-openbsd-64",
  // "linux arm LE": "bun-cli-linux-arm",
  // "linux arm64 LE": "bun-cli-linux-arm64",
  // "linux ia32 LE": "bun-cli-linux-32",
  // "linux mips64el LE": "bun-cli-linux-mips64le",
  // "linux ppc64 LE": "bun-cli-linux-ppc64le",
  // "sunos x64 LE": "bun-cli-sunos-64",
};

// Pick a package to install, or bail out on unsupported platforms.
const windowsPackage = knownWindowsPackages[platformKey];
const unixlikePackage = knownUnixlikePackages[platformKey];
if (windowsPackage) {
  installOnWindows(windowsPackage);
} else if (unixlikePackage) {
  installOnUnix(unixlikePackage);
} else {
  console.error(`Unsupported platform: ${platformKey}`);
  process.exit(1);
}

View File

@@ -1,2 +1,5 @@
*.bun
node_modules
node_modules
pnpm-log.yaml
yarn-error.log
yarn.lock

View File

@@ -1,7 +1,7 @@
{
"name": "bun-framework-next",
"version": "0.0.0-22",
"description": "",
"version": "0.0.0-23",
"description": "Bun compatibility layer for Next.js v11.1.2",
"framework": {
"displayName": "Next.js",
"static": "public",

View File

@@ -35,7 +35,7 @@ const BuildCommand = @import("./cli/build_command.zig").BuildCommand;
const CreateCommand = @import("./cli/create_command.zig").CreateCommand;
const CreateListExamplesCommand = @import("./cli/create_command.zig").CreateListExamplesCommand;
const RunCommand = @import("./cli/run_command.zig").RunCommand;
const UpgradeCommand = @import("./cli/upgrade_command.zig").UpgradeCommand;
var start_time: i128 = undefined;
pub const Cli = struct {
@@ -146,7 +146,8 @@ pub const Arguments = struct {
clap.parseParam("-i, --inject <STR>... Inject module at the top of every file") catch unreachable,
clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx, ts, css") catch unreachable,
clap.parseParam("--origin <STR> Rewrite import paths to start with --origin. Default: \"\"") catch unreachable,
clap.parseParam("--port <STR> Port to serve Bun's dev server on. Default: \"/3000\"") catch unreachable,
clap.parseParam("--port <STR> Port to serve Bun's dev server on. Default: \"3000\"") catch unreachable,
clap.parseParam("--silent Don't repeat the command for bun run") catch unreachable,
// clap.parseParam("-o, --outdir <STR> Save output to directory (default: \"out\" if none provided and multiple entry points passed)") catch unreachable,
// clap.parseParam("-r, --resolve <STR> Determine import/require behavior. \"disable\" ignores. \"dev\" bundles node_modules and builds everything else as independent entry points") catch unreachable,
@@ -223,6 +224,8 @@ pub const Arguments = struct {
.disable_hmr = args.flag("--disable-hmr"),
};
ctx.positionals = args.positionals();
ctx.debug.silent = args.flag("--silent");
if (opts.port != null and opts.origin == null) {
opts.origin = try std.fmt.allocPrint(allocator, "http://localhost:{d}/", .{opts.port.?});
}
@@ -295,13 +298,15 @@ pub const Arguments = struct {
}
const production = false; //args.flag("--production");
var write = entry_points.len > 1 or output_dir != null;
if (write and output_dir == null) {
var _paths = [_]string{ cwd, "out" };
output_dir = try std.fs.path.resolve(allocator, &_paths);
if (comptime cmd == .BuildCommand) {
var write = entry_points.len > 1 or output_dir != null;
if (write and output_dir == null) {
var _paths = [_]string{ cwd, "out" };
output_dir = try std.fs.path.resolve(allocator, &_paths);
}
opts.write = write;
}
opts.write = write;
opts.entry_points = entry_points;
var jsx_factory = args.option("--jsx-factory");
@@ -452,14 +457,16 @@ const HelpCommand = struct {
const fmt =
\\> <r> <b><green>dev <r><d> ./a.ts ./b.jsx<r> Start a Bun Dev Server
\\> <r> <b><magenta>bun <r><d> ./a.ts ./b.jsx<r> Bundle dependencies of input files into a <r><magenta>.bun<r>
\\> <r> <b><cyan>create <r><d> next ./app<r> Start a new project from a template <d>(shorthand: c)<r>
\\> <r> <b><blue>discord <r> Open Bun's Discord server
\\> <r> <b><green>run <r><d> test <r> Run a package.json script or executable<r>
\\> <r> <b><cyan>create <r><d>next ./app<r> Start a new project from a template <d>(shorthand: c)<r>
\\> <r> <b><blue>upgrade <r> Get the latest version of Bun
\\> <r> <b><d>discord <r> Open Bun's Discord server
\\> <r> <b><d>help <r> Print this help menu
\\
;
switch (reason) {
.explicit => Output.pretty("<r><b><magenta>Bun<r>: a fast bundler & transpiler for web software.\n\n" ++ fmt, .{}),
.explicit => Output.pretty("<r><b><magenta>Bun<r>: a fast bundler, transpiler and task runner for web software.\n\n" ++ fmt, .{}),
.invalid_command => Output.prettyError("<r><red>Uh-oh<r> not sure what to do with that command.\n\n" ++ fmt, .{}),
}
} else {
@@ -470,13 +477,14 @@ const HelpCommand = struct {
\\> <r> <b><cyan>create<r><d> next ./app<r> Start a new project from a template<r>
\\> <r> <b><magenta>bun <r><d> ./a.ts ./b.jsx<r> Bundle dependencies of input files into a <r><magenta>.bun<r>
\\> <r> <green>run <r><d> ./a.ts <r> Run a JavaScript-like file with Bun.js
\\> <r> <b><blue>discord<r> Open Bun's Discord server
\\> <r> <b><blue>discord<r> Open Bun's Discord server
\\> <r> <b><blue>upgrade <r> Get the latest version of Bun
\\> <r> <b><d>help <r> Print this help menu
\\
;
switch (reason) {
.explicit => Output.pretty("<r><b><magenta>Bun<r>: a fast bundler & transpiler for web software.\n\n" ++ fmt, .{dirname}),
.explicit => Output.pretty("<r><b><magenta>Bun<r>: a fast bundler, transpiler and task runner for web software.\n\n" ++ fmt, .{dirname}),
.invalid_command => Output.prettyError("<r><red>Uh-oh<r> not sure what to do with that command.\n\n" ++ fmt, .{dirname}),
}
}
@@ -522,6 +530,7 @@ pub const Command = struct {
pub const DebugOptions = struct {
dump_environment_variables: bool = false,
fallback_only: bool = false,
silent: bool = false,
};
pub const Context = struct {
@@ -529,6 +538,7 @@ pub const Command = struct {
args: Api.TransformOptions = std.mem.zeroes(Api.TransformOptions),
log: *logger.Log,
allocator: *std.mem.Allocator,
positionals: []const string = &[_]string{},
debug: DebugOptions = DebugOptions{},
@@ -570,6 +580,7 @@ pub const Command = struct {
RootCommandMatcher.case("init") => .InitCommand,
RootCommandMatcher.case("bun") => .BunCommand,
RootCommandMatcher.case("discord") => .DiscordCommand,
RootCommandMatcher.case("upgrade") => .UpgradeCommand,
RootCommandMatcher.case("c"), RootCommandMatcher.case("create") => .CreateCommand,
RootCommandMatcher.case("b"), RootCommandMatcher.case("build") => .BuildCommand,
@@ -586,6 +597,7 @@ pub const Command = struct {
RootCommandMatcher.case("discord") => .DiscordCommand,
RootCommandMatcher.case("d"), RootCommandMatcher.case("dev") => .DevCommand,
RootCommandMatcher.case("c"), RootCommandMatcher.case("create") => .CreateCommand,
RootCommandMatcher.case("upgrade") => .UpgradeCommand,
RootCommandMatcher.case("help") => .HelpCommand,
else => .AutoCommand,
@@ -644,8 +656,14 @@ pub const Command = struct {
},
.RunCommand => {
const ctx = try Command.Context.create(allocator, log, .RunCommand);
try RunCommand.exec(ctx);
if (ctx.positionals.len > 0) {
_ = try RunCommand.exec(ctx, false, true);
}
},
.UpgradeCommand => {
const ctx = try Command.Context.create(allocator, log, .UpgradeCommand);
try UpgradeCommand.exec(ctx);
return;
},
.AutoCommand => {
var ctx = Command.Context.create(allocator, log, .AutoCommand) catch |e| {
@@ -667,6 +685,12 @@ pub const Command = struct {
return;
}
if (ctx.positionals.len > 0 and (std.fs.path.extension(ctx.positionals[0]).len == 0)) {
if (try RunCommand.exec(ctx, true, false)) {
return;
}
}
if (FeatureFlags.dev_only) {
try DevCommand.exec(ctx);
} else {
@@ -687,5 +711,6 @@ pub const Command = struct {
AutoCommand,
HelpCommand,
CreateCommand,
UpgradeCommand,
};
};

114
src/cli/install.sh Normal file
View File

@@ -0,0 +1,114 @@
#!/bin/bash
# Installer for the bun CLI: downloads the release zip for this platform
# from GitHub, unpacks it into $BUN_INSTALL/bin (default: ~/.bun/bin), and
# prints shell-specific PATH setup instructions when bun isn't on PATH yet.
#
# Usage: install.sh [release-tag]
#   Without an argument the latest release is installed.

# Reset
Color_Off=''

# Regular Colors
Red=''
Green=''
White=''

# Bold
BWhite=''
BGreen=''

# Only emit ANSI escapes when stdout is a terminal.
if test -t 1; then
    # Reset
    Color_Off='\033[0m' # Text Reset

    # Regular Colors
    Red='\033[0;31m'   # Red
    Green='\033[0;32m' # Green
    White='\033[0;37m' # White

    # Bold
    BGreen='\033[1;32m' # Green
    BWhite='\033[1;37m' # White
fi

if ! command -v unzip >/dev/null; then
    echo -e "${Red}error${Color_Off}: unzip is required to install Bun (see: https://github.com/Jarred-Sumner/bun#unzip-is-required)." 1>&2
    exit 1
fi

if [ "$OS" = "Windows_NT" ]; then
    echo "error: Please install Bun using Windows Subsystem for Linux."
    exit 1
else
    case $(uname -sm) in
    "Darwin x86_64") target="darwin-x64" ;;
    "Darwin arm64") target="darwin-aarch64" ;;
    *) target="linux-x64" ;;
    esac
fi

github_repo="https://github.com/Jarred-Sumner/bun-releases-for-updater"

if [ $# -eq 0 ]; then
    bun_uri="$github_repo/releases/latest/download/bun-${target}.zip"
else
    bun_uri="$github_repo/releases/download/${1}/bun-${target}.zip"
fi

bun_install="${BUN_INSTALL:-$HOME/.bun}"
bin_dir="$bun_install/bin"
exe="$bin_dir/bun"

if [ ! -d "$bin_dir" ]; then
    mkdir -p "$bin_dir"
fi

curl --fail --location --progress-bar --output "$exe.zip" "$bun_uri"
if (( $? )); then
    echo -e "${Red}error${Color_Off}: Failed to download Bun from $bun_uri" 1>&2
    exit 1
fi

unzip -d "$bin_dir" -q -o "$exe.zip"
if (( $? )); then
    echo -e "${Red}error${Color_Off}: Failed to extract Bun" 1>&2
    exit 1
fi

# The zip contains a "bun-<target>/bun" layout; move the binary into place.
mv "$bin_dir/bun-${target}/bun" "$exe"
if (( $? )); then
    echo -e "${Red}error${Color_Off}: Failed to extract Bun" 1>&2
    exit 1
fi

chmod +x "$exe"
if (( $? )); then
    echo -e "${Red}error${Color_Off}: Failed to set permissions on bun executable." 1>&2
    exit 1
fi

rmdir "$bin_dir/bun-${target}"
rm "$exe.zip"

echo -e "${Green}Bun was installed successfully to ${BGreen}$exe$Color_Off"

# Previously this ran `command -v bun --version`, but `command -v` takes a
# single name; the extra argument was ignored at best. We only want to know
# whether "bun" is already resolvable on PATH.
if command -v bun >/dev/null; then
    echo "Run 'bun --help' to get started"
    exit 0
fi

# bun isn't on PATH — print setup instructions for the user's login shell.
if test "$(basename "$SHELL")" = "fish"; then
    echo ""
    echo "Manually add the directory to your \$HOME/.config/fish"
    echo ""
    echo -e " $BWhite set -Ux BUN_INSTALL \"$bun_install\"$Color_Off"
    echo -e " $BWhite set -px --path PATH \"$bin_dir\"$Color_Off"
elif test "$(basename "$SHELL")" = "zsh"; then
    echo ""
    echo "Manually add the directory to your \$HOME/.zshrc (or similar)"
    echo ""
    # The closing escaped quote after $bun_install was missing before.
    echo -e " $BWhite export BUN_INSTALL=\"$bun_install\"$Color_Off"
    echo -e " $BWhite export PATH=\"\$BUN_INSTALL/bin:\$PATH\"$Color_Off"
else
    echo ""
    echo "Manually add the directory to your \$HOME/.bashrc (or similar)"
    echo ""
    # "$BWhiteexport" expanded an undefined variable and swallowed the word
    # "export"; braces make the variable boundary explicit.
    echo -e " ${BWhite}export BUN_INSTALL=\"$bun_install\"$Color_Off"
    echo -e " ${BWhite}export PATH=\"\$BUN_INSTALL/bin:\$PATH\"$Color_Off"
fi
echo ""
echo -e "To get started, run"
echo -e "$BWhite"
echo -e " bun --help$Color_Off"

View File

@@ -0,0 +1,107 @@
const std = @import("std");
// yarn v2.3 commands
// Built-in subcommands of yarn v2 ("berry"). Used when rewriting
// package.json scripts: "yarn <word>" is only treated as "run <word>" when
// <word> is NOT one of these builtins.
const yarn_v2 = [_][]const u8{
"add",
"bin",
"cache",
"config",
"dedupe",
"dlx",
"exec",
"explain",
"info",
"init",
"install",
"link",
"node",
"npm",
"pack",
"patch",
"plugin",
"rebuild",
"remove",
"run",
"set",
"unplug",
"up",
"why",
"workspace",
"workspaces",
};
// yarn v1 commands
// Built-in subcommands of yarn classic (v1), used the same way as yarn_v2:
// a "yarn <word>" invocation is only rewritten to "bun run" when <word> is
// not in this list.
const yarn_v1 = [_][]const u8{
"access",
"add",
"audit",
"autoclean",
"bin",
"cache",
"check",
"config",
"create",
"exec",
"generate-lock-entry",
"generateLockEntry",
"global",
"help",
"import",
"info",
"init",
"install",
"licenses",
"link",
"list",
"login",
"logout",
"node",
"outdated",
"owner",
"pack",
"policies",
"publish",
"remove",
"run",
"tag",
"team",
"unlink",
"unplug",
"upgrade",
"upgrade-interactive",
"upgradeInteractive",
"version",
"versions",
"why",
"workspace",
"workspaces",
};
/// Comptime-computed, deduplicated set of Wyhash(seed 0) hashes of every
/// yarn v1 and v2 builtin subcommand. Stored as u64 hashes so a runtime
/// lookup is a cheap `std.mem.indexOfScalar(u64, ...)`.
///
/// The previous version repeated `@setEvalBranchQuota(9999)` inside both
/// loops and duplicated the loop body for each list; one quota call at the
/// top of the comptime block suffices, and comptime `++` lets a single loop
/// walk both lists.
pub const all_yarn_commands = brk: {
    @setEvalBranchQuota(9999);
    var array: [yarn_v2.len + yarn_v1.len]u64 = undefined;
    var array_i: usize = 0;
    // Hash every command from both lists, skipping hashes already present
    // (many commands exist in both v1 and v2).
    for (yarn_v2 ++ yarn_v1) |yarn| {
        const hash = std.hash.Wyhash.hash(0, yarn);
        if (std.mem.indexOfScalar(u64, array[0..array_i], hash) == null) {
            array[array_i] = hash;
            array_i += 1;
        }
    }
    break :brk array[0..array_i];
};

View File

@@ -20,9 +20,582 @@ const configureTransformOptionsForBun = @import(".././javascript/jsc/config.zig"
const Command = @import("../cli.zig").Command;
const bundler = @import("../bundler.zig");
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
const DotEnv = @import("../env_loader.zig");
const which = @import("../which.zig").which;
var path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
var path_buf2: [std.fs.MAX_PATH_BYTES]u8 = undefined;
// Names of npm-compatible environment variables exposed to package.json
// scripts (set from the enclosing package's name/version in `exec`).
const NpmArgs = struct {
// https://github.com/npm/rfcs/blob/main/implemented/0021-reduce-lifecycle-script-environment.md#detailed-explanation
pub const package_name: string = "npm_package_name";
pub const package_version: string = "npm_package_version";
};
const yarn_commands: []u64 = @import("./list-of-yarn-commands.zig").all_yarn_commands;
pub const RunCommand = struct {
pub fn exec(ctx: Command.Context) !void {
Output.prettyErrorln("<r><red>nOt<r> <magenta>iMpLeMeNtEd<r> <yellow>yEt<r>", .{});
// Shells tried in order when executing package.json scripts via "<shell> -c".
const shells_to_search = &[_]string{
"bash",
"sh",
"zsh",
};
/// Find the first shell from `shells_to_search` resolvable via `which`
/// (searching PATH; `cwd` is forwarded to `which` — exact lookup semantics
/// live there). Returns the resolved path, or null when no shell was found.
pub fn findShell(PATH: string, cwd: string) ?string {
inline for (shells_to_search) |shell| {
if (which(&path_buf, PATH, cwd, shell)) |shell_| {
// NOTE: the result points into the shared module-level `path_buf`,
// so it is only valid until the next `which` call.
return shell_;
}
}
return null;
}
// Name of the running bun binary ("bun-debug" in debug builds), and the
// "<bin> run" prefix substituted for yarn/npm/pnpm "run" invocations.
const BUN_BIN_NAME = if (isDebug) "bun-debug" else "bun";
const BUN_RUN = std.fmt.comptimePrint("{s} run", .{BUN_BIN_NAME});
/// Run a package.json script through the system shell ("<shell> -c <cmd>").
///
/// Occurrences of "yarn run", "npm run" and "pnpm run" in the script are
/// rewritten to invoke bun itself ("bun run"), as is bare "yarn <script>"
/// when <script> is not a yarn builtin (yarn is the only package manager
/// that lets you omit "run"). `passthrough` arguments are appended to the
/// rewritten command line.
///
/// Returns true when the script was launched (even if spawning failed, so
/// callers don't retry); exits the process with the child's status when the
/// child exits non-zero. Returns error.MissingShell when no shell is found.
pub fn runPackageScript(
    ctx: Command.Context,
    original_script: string,
    name: string,
    cwd: string,
    env: *DotEnv.Loader,
    passthrough: []const string,
    silent: bool,
) !bool {
    const shell_bin = findShell(env.map.get("PATH") orelse "", cwd) orelse return error.MissingShell;

    var script = original_script;
    var copy_script = try std.ArrayList(u8).initCapacity(ctx.allocator, script.len);

    // Rewrite pass: walk the script word by word, replacing package-manager
    // "run" invocations with BUN_RUN and copying everything else through.
    var splitter = std.mem.split(u8, script, " ");
    var is_first = true;
    var skip_next = false;
    while (splitter.next()) |entry_| {
        const skip = skip_next;
        skip_next = false;

        // Strip a single layer of \"...\" or '...' quoting around the token
        // so quoted "yarn"/"npm"/"pnpm" are still recognized.
        var entry = entry_;
        if (strings.startsWith(entry, "\\\"") and strings.endsWith(entry, "\\\"") and entry.len > 4) {
            entry = entry[2 .. entry.len - 2];
        }
        if (strings.startsWith(entry, "'") and strings.endsWith(entry, "'") and entry.len > 2) {
            entry = entry[1 .. entry.len - 1];
        }

        var replace = false;
        defer is_first = false;

        if (!skip) {
            replacer: {
                if (strings.eqlComptime(entry, "yarn")) {
                    // Peek at the next token without consuming it.
                    var _split = splitter;
                    if (_split.next()) |entry2| {
                        if (strings.eqlComptime(entry2, "run")) {
                            replace = true;
                            _ = splitter.next();
                            break :replacer;
                        }

                        // "yarn npm" is a valid command; this would confuse
                        // us. So when the next word is a known yarn builtin,
                        // rather than carefully parsing each yarn version's
                        // arguments, we just pass "yarn <builtin>" through
                        // untouched. This works because yarn is the only
                        // package manager that lets you omit "run" (bun is
                        // not a package manager).
                        const hash = std.hash.Wyhash.hash(0, entry2);
                        if (std.mem.indexOfScalar(u64, yarn_commands, hash) != null) {
                            skip_next = true;
                            break :replacer;
                        }

                        // "yarn <script>" shorthand → "bun run <script>".
                        replace = true;
                        break :replacer;
                    }
                }

                if (strings.eqlComptime(entry, "pnpm")) {
                    var _split = splitter;
                    if (_split.next()) |entry2| {
                        if (strings.eqlComptime(entry2, "run")) {
                            replace = true;
                            _ = splitter.next();
                            break :replacer;
                        }
                    }
                }

                if (strings.eqlComptime(entry, "npm")) {
                    var _split = splitter;
                    if (_split.next()) |entry2| {
                        if (strings.eqlComptime(entry2, "run")) {
                            replace = true;
                            _ = splitter.next();
                            break :replacer;
                        }
                    }
                }
            }
        }

        if (!is_first) {
            copy_script.append(' ') catch unreachable;
        }
        if (replace) {
            try copy_script.appendSlice(BUN_RUN);
        } else {
            try copy_script.appendSlice(entry);
        }
    }

    // Append passthrough arguments to the REWRITTEN script. (The previous
    // code copied `script` — the original, un-rewritten text — into the
    // combined buffer, silently discarding the rewrite whenever passthrough
    // arguments were present.)
    var combined_script: string = copy_script.items;
    if (passthrough.len > 0) {
        var combined_script_len: usize = combined_script.len;
        for (passthrough) |p| {
            combined_script_len += p.len + 1;
        }
        var combined_script_buf = try ctx.allocator.alloc(u8, combined_script_len);
        std.mem.copy(u8, combined_script_buf, combined_script);
        var remaining_script_buf = combined_script_buf[combined_script.len..];
        for (passthrough) |p| {
            remaining_script_buf[0] = ' ';
            std.mem.copy(u8, remaining_script_buf[1..], p);
            remaining_script_buf = remaining_script_buf[p.len + 1 ..];
        }
        combined_script = combined_script_buf;
    }

    var argv = [_]string{ shell_bin, "-c", combined_script };
    var child_process = try std.ChildProcess.init(&argv, ctx.allocator);

    if (!silent) {
        Output.prettyErrorln("<r><d><magenta>$<r> <d><b>{s}<r>", .{combined_script});
        Output.flush();
    }

    var buf_map = try env.map.cloneToBufMap(ctx.allocator);
    child_process.env_map = &buf_map;
    child_process.cwd = cwd;
    child_process.stderr_behavior = .Inherit;
    child_process.stdin_behavior = .Inherit;
    child_process.stdout_behavior = .Inherit;

    const result = child_process.spawnAndWait() catch |err| {
        Output.prettyErrorln("<r><red>error<r>: Failed to run script <b>{s}<r> due to error <b>{s}<r>", .{ name, @errorName(err) });
        Output.flush();
        return true;
    };

    if (result.Exited > 0) {
        Output.prettyErrorln("<r><red>Script error<r> <b>\"{s}\"<r> exited with {d} status<r>", .{ name, result.Exited });
        Output.flush();
        // Propagate the child's exit STATUS. The previous code exited with
        // `result.Signal`, which is a different member of the Term union
        // than the `.Exited` one checked above.
        std.os.exit(@truncate(u8, result.Exited));
    }

    return true;
}
/// Spawn `executable` directly with `passthrough` appended to its argv,
/// inheriting stdio, running in `cwd` with the environment from `env`.
///
/// Returns false when the process could not be spawned; exits this process
/// with the child's exit status when the child exits non-zero; otherwise
/// returns true.
pub fn runBinary(
    ctx: Command.Context,
    executable: []const u8,
    cwd: string,
    env: *DotEnv.Loader,
    passthrough: []const string,
) !bool {
    var argv_ = [_]string{executable};
    var argv: []const string = &argv_;

    // Only allocate when there are extra arguments to forward.
    if (passthrough.len > 0) {
        var array_list = std.ArrayList(string).init(ctx.allocator);
        try array_list.append(executable);
        try array_list.appendSlice(passthrough);
        argv = array_list.toOwnedSlice();
    }

    var child_process = try std.ChildProcess.init(argv, ctx.allocator);
    var buf_map = try env.map.cloneToBufMap(ctx.allocator);
    child_process.cwd = cwd;
    child_process.env_map = &buf_map;
    child_process.stderr_behavior = .Inherit;
    child_process.stdin_behavior = .Inherit;
    child_process.stdout_behavior = .Inherit;

    const result = child_process.spawnAndWait() catch |err| {
        Output.prettyErrorln("<r><red>error<r>: Failed to run <b>{s}<r> due to error <b>{s}<r>", .{ std.fs.path.basename(executable), @errorName(err) });
        Output.flush();
        return false;
    };

    if (result.Exited > 0) {
        Output.prettyErrorln("<r><red>error<r> <b>\"{s}\"<r> exited with {d} status<r>", .{ std.fs.path.basename(executable), result.Exited });
        Output.flush();
        // Exit with the child's exit STATUS. The previous code exited with
        // `result.Signal`, a different member of the Term union than the
        // `.Exited` one checked above.
        std.os.exit(@truncate(u8, result.Exited));
    }

    return true;
}
/// NOTE(review): configures a bundler (load-all .env behavior, bin-folder
/// and scripts tracking) but then does nothing with it — this looks like an
/// unfinished stub for a future "list scripts/binaries" command; confirm
/// before relying on it.
pub fn ls(ctx: Command.Context) !void {
var args = ctx.args;
args.node_modules_bundle_path = null;
args.node_modules_bundle_path_server = null;
args.generate_node_module_bundle = false;
var this_bundler = try bundler.Bundler.init(ctx.allocator, ctx.log, args, null, null);
this_bundler.options.env.behavior = Api.DotEnvBehavior.load_all;
this_bundler.options.env.prefix = "";
this_bundler.resolver.care_about_bin_folder = true;
this_bundler.resolver.care_about_scripts = true;
this_bundler.configureLinker();
}
/// Entry point for "bun run": execute a package.json script (with pre/post
/// hooks) or an executable found in the bin directories bun knows about.
///
/// `bin_dirs_only` restricts the executable search to the directories bun
/// prepended to PATH (node_modules/.bin and the package dir) instead of the
/// full PATH. `log_errors` controls whether failures are printed (and the
/// process exited) or just reported by returning false.
/// Returns true when a script or binary was run.
pub fn exec(ctx: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool {
    // Configure a bundler purely for its resolver/.env machinery — nothing
    // is bundled here.
    var args = ctx.args;
    args.node_modules_bundle_path = null;
    args.node_modules_bundle_path_server = null;
    args.generate_node_module_bundle = false;

    var this_bundler = try bundler.Bundler.init(ctx.allocator, ctx.log, args, null, null);
    this_bundler.options.env.behavior = Api.DotEnvBehavior.load_all;
    this_bundler.options.env.prefix = "";
    this_bundler.env.quiet = true;

    this_bundler.resolver.care_about_bin_folder = true;
    this_bundler.resolver.care_about_scripts = true;
    defer {
        this_bundler.resolver.care_about_bin_folder = false;
        this_bundler.resolver.care_about_scripts = false;
    }
    this_bundler.configureLinker();

    var positionals = ctx.positionals;
    // Drop a leading "run"/"r" token. The original condition,
    // `len > 0 and eql("run") or eql("r")`, parses as
    // `(len > 0 and eql("run")) or eql("r")` and therefore indexed
    // positionals[0] even when positionals was empty — an out-of-bounds
    // read. The parentheses below fix that.
    if (positionals.len > 0 and (strings.eqlComptime(positionals[0], "run") or strings.eqlComptime(positionals[0], "r"))) {
        positionals = positionals[1..];
    }

    var root_dir_info = this_bundler.resolver.readDirInfo(this_bundler.fs.top_level_dir) catch |err| {
        if (!log_errors) return false;
        if (Output.enable_ansi_colors) {
            ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
        } else {
            ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
        }
        Output.prettyErrorln("Error loading directory: \"{s}\"", .{@errorName(err)});
        Output.flush();
        return err;
    } orelse {
        if (Output.enable_ansi_colors) {
            ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
        } else {
            ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
        }
        Output.prettyErrorln("Error loading current directory", .{});
        Output.flush();
        return error.CouldntReadCurrentDirectory;
    };

    var package_json_dir: string = "";

    {
        this_bundler.env.loadProcess();
        if (this_bundler.env.map.get("NODE_ENV")) |node_env| {
            if (strings.eqlComptime(node_env, "production")) {
                this_bundler.options.production = true;
            }
        }

        // Run .env in the root dir
        this_bundler.runEnvLoader() catch {};

        if (root_dir_info.getEntries()) |dir| {
            // Run .env again if it exists in a parent dir
            if (this_bundler.options.production) {
                this_bundler.env.load(&this_bundler.fs.fs, dir, false) catch {};
            } else {
                this_bundler.env.load(&this_bundler.fs.fs, dir, true) catch {};
            }
        }
    }

    var bin_dirs = this_bundler.resolver.binDirs();

    if (root_dir_info.enclosing_package_json) |package_json| {
        if (root_dir_info.package_json == null) {
            // no trailing slash
            package_json_dir = std.mem.trimRight(u8, package_json.source.path.name.dir, "/");
        }
    }

    var PATH = this_bundler.env.map.get("PATH") orelse "";
    var ORIGINAL_PATH = PATH;

    // Prepend the enclosing package dir and every node_modules/.bin to PATH
    // so scripts and the `which` lookup below find local binaries first.
    if (bin_dirs.len > 0 or package_json_dir.len > 0) {
        var new_path_len: usize = PATH.len + 2;
        for (bin_dirs) |bin| {
            new_path_len += bin.len + 1;
        }
        if (package_json_dir.len > 0) {
            new_path_len += package_json_dir.len + 1;
        }

        var new_path = try std.ArrayList(u8).initCapacity(ctx.allocator, new_path_len);

        {
            var needs_colon = false;
            if (package_json_dir.len > 0) {
                defer needs_colon = true;
                if (needs_colon) {
                    try new_path.append(':');
                }
                try new_path.appendSlice(package_json_dir);
            }

            var bin_dir_i: i32 = @intCast(i32, bin_dirs.len) - 1;
            // Iterate in reverse order: directories are added to bin_dirs in
            // top-down order, so the parent-most node_modules/.bin is first.
            while (bin_dir_i >= 0) : (bin_dir_i -= 1) {
                defer needs_colon = true;
                if (needs_colon) {
                    try new_path.append(':');
                }
                try new_path.appendSlice(bin_dirs[@intCast(usize, bin_dir_i)]);
            }

            if (needs_colon) {
                try new_path.append(':');
            }
            try new_path.appendSlice(PATH);
        }

        this_bundler.env.map.put("PATH", new_path.items) catch unreachable;
        PATH = new_path.items;
    }

    var script_name_to_search: string = "";
    if (positionals.len > 0) {
        script_name_to_search = positionals[0];
    }

    var passthrough: []const string = &[_]string{};
    var passthrough_list = std.ArrayList(string).init(ctx.allocator);
    if (script_name_to_search.len > 0) {
        get_passthrough: {
            // If they explicitly pass "--", everything after it is passed
            // through verbatim.
            for (std.os.argv) |argv, i| {
                if (strings.eqlComptime(std.mem.span(argv), "--")) {
                    if (std.os.argv.len > i + 1) {
                        try passthrough_list.ensureTotalCapacity(std.os.argv[i + 1 ..].len);
                        for (std.os.argv[i + 1 ..]) |arg| {
                            passthrough_list.appendAssumeCapacity(std.mem.span(arg));
                        }
                        passthrough = passthrough_list.toOwnedSlice();
                        break :get_passthrough;
                    }
                }
            }

            // Without "--", everything after the script name is passed
            // through.
            for (std.os.argv) |argv, i| {
                if (strings.eql(std.mem.span(argv), script_name_to_search)) {
                    if (std.os.argv.len > i + 1) {
                        try passthrough_list.ensureTotalCapacity(std.os.argv[i + 1 ..].len);
                        for (std.os.argv[i + 1 ..]) |arg| {
                            passthrough_list.appendAssumeCapacity(std.mem.span(arg));
                        }
                        passthrough = passthrough_list.toOwnedSlice();
                        break :get_passthrough;
                    }
                }
            }
        }
    }

    if (root_dir_info.enclosing_package_json) |package_json| {
        // Expose npm-compatible metadata to scripts, without clobbering
        // values inherited from the environment.
        if (package_json.name.len > 0) {
            if (this_bundler.env.map.get(NpmArgs.package_name) == null) {
                this_bundler.env.map.put(NpmArgs.package_name, package_json.name) catch unreachable;
            }
        }
        if (package_json.version.len > 0) {
            if (this_bundler.env.map.get(NpmArgs.package_version) == null) {
                this_bundler.env.map.put(NpmArgs.package_version, package_json.version) catch unreachable;
            }
        }

        if (package_json.scripts) |scripts| {
            switch (script_name_to_search.len) {
                0 => {
                    // No script requested: list the available ones.
                    var display_name = package_json.name;
                    if (display_name.len == 0) {
                        display_name = std.fs.path.basename(package_json.source.path.name.dir);
                    }
                    var iterator = scripts.iterator();
                    if (scripts.count() > 0) {
                        Output.prettyln("<r><blue><b>{s}<r> scripts:<r>\n", .{display_name});
                        while (iterator.next()) |entry| {
                            Output.prettyln("\n", .{});
                            Output.prettyln(" bun run <blue>{s}<r>\n", .{entry.key_ptr.*});
                            Output.prettyln(" <d> {s}<r>\n", .{entry.value_ptr.*});
                        }
                        Output.prettyln("\n<d>{d} scripts<r>", .{scripts.count()});
                        Output.flush();
                        return true;
                    } else {
                        Output.prettyln("<r><blue><b>{s}<r> has no \"scripts\" in package.json.", .{display_name});
                        Output.flush();
                        return true;
                    }
                },
                else => {
                    if (scripts.get(script_name_to_search)) |script_content| {
                        // One buffer holds both hook names: it is allocated
                        // as "ppre<name>", so [1..] is "pre<name>", and
                        // overwriting the first four bytes with "post" below
                        // yields "post<name>".
                        var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{s}", .{script_name_to_search});

                        if (scripts.get(temp_script_buffer[1..])) |prescript| {
                            if (!try runPackageScript(
                                ctx,
                                prescript,
                                temp_script_buffer[1..],
                                this_bundler.fs.top_level_dir,
                                this_bundler.env,
                                passthrough,
                                ctx.debug.silent,
                            )) {
                                return false;
                            }
                        }

                        if (!try runPackageScript(
                            ctx,
                            script_content,
                            script_name_to_search,
                            this_bundler.fs.top_level_dir,
                            this_bundler.env,
                            passthrough,
                            ctx.debug.silent,
                        )) return false;

                        std.mem.copy(u8, temp_script_buffer, "post");

                        if (scripts.get(temp_script_buffer)) |postscript| {
                            if (!try runPackageScript(
                                ctx,
                                postscript,
                                temp_script_buffer,
                                this_bundler.fs.top_level_dir,
                                this_bundler.env,
                                passthrough,
                                ctx.debug.silent,
                            )) {
                                return false;
                            }
                        }

                        return true;
                    }
                },
            }
        }
    }

    if (script_name_to_search.len == 0) {
        if (comptime log_errors) {
            Output.prettyError("<r>No \"scripts\" in package.json found.", .{});
            Output.flush();
            std.os.exit(0);
        }
        return false;
    }

    // When only the bun-managed bin dirs should be searched, slice off just
    // the prefix we prepended to PATH above.
    var path_for_which = PATH;
    if (comptime bin_dirs_only) {
        path_for_which = "";
        if (ORIGINAL_PATH.len < PATH.len) {
            path_for_which = PATH[0 .. PATH.len - (ORIGINAL_PATH.len + 1)];
        }
    }

    if (path_for_which.len > 0) {
        if (which(&path_buf, path_for_which, this_bundler.fs.top_level_dir, script_name_to_search)) |destination| {
            // Resolve the found entry to a real path before executing it.
            var file = std.fs.openFileAbsoluteZ(destination, .{ .read = true }) catch |err| {
                if (!log_errors) return false;
                Output.prettyErrorln("<r>error: <red>{s}<r> opening file: \"{s}\"", .{ err, std.mem.span(destination) });
                Output.flush();
                return err;
            };
            var outbuf = std.os.getFdPath(file.handle, &path_buf2) catch |err| {
                if (!log_errors) return false;
                Output.prettyErrorln("<r>error: <red>{s}<r> resolving file: \"{s}\"", .{ err, std.mem.span(destination) });
                Output.flush();
                return err;
            };
            file.close();

            return try runBinary(
                ctx,
                try this_bundler.fs.dirname_store.append([]u8, outbuf),
                this_bundler.fs.top_level_dir,
                this_bundler.env,
                passthrough,
            );
        }
    }

    if (comptime log_errors) {
        Output.prettyError("<r><red>error:<r> Missing script: <b>{s}<r>\n", .{script_name_to_search});
        Output.flush();
        // NOTE(review): this exits 0 even though an error was printed —
        // confirm whether a non-zero status was intended before changing it.
        std.os.exit(0);
    }

    return false;
}
};

568
src/cli/upgrade_command.zig Normal file
View File

@@ -0,0 +1,568 @@
usingnamespace @import("../global.zig");
const std = @import("std");
const lex = @import("../js_lexer.zig");
const logger = @import("../logger.zig");
const alloc = @import("../alloc.zig");
const options = @import("../options.zig");
const js_parser = @import("../js_parser.zig");
const js_ast = @import("../js_ast.zig");
const linker = @import("../linker.zig");
usingnamespace @import("../ast/base.zig");
usingnamespace @import("../defines.zig");
const panicky = @import("../panic_handler.zig");
const allocators = @import("../allocators.zig");
const sync = @import(".././sync.zig");
const Api = @import("../api/schema.zig").Api;
const resolve_path = @import("../resolver/resolve_path.zig");
const configureTransformOptionsForBun = @import("../javascript/jsc/config.zig").configureTransformOptionsForBun;
const Command = @import("../cli.zig").Command;
const bundler = @import("../bundler.zig");
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
const fs = @import("../fs.zig");
const URL = @import("../query_string_map.zig").URL;
const HTTPClient = @import("../http_client.zig");
const ParseJSON = @import("../json_parser.zig").ParseJSON;
const Archive = @import("../libarchive/libarchive.zig").Archive;
const Zlib = @import("../zlib.zig");
const JSPrinter = @import("../js_printer.zig");
const DotEnv = @import("../env_loader.zig");
const NPMClient = @import("../which_npm_client.zig").NPMClient;
const which = @import("../which.zig").which;
const clap = @import("clap");
const Lock = @import("../lock.zig").Lock;
const Headers = @import("../javascript/jsc/webcore/response.zig").Headers;
const CopyFile = @import("../copy_file.zig");
/// Set once the shared AST node stores have been created.
pub var initialized_store = false;

/// Lazily creates the global js_ast expression/statement stores (used when
/// parsing the releases JSON). Safe to call repeatedly; only the first call
/// does any work.
pub fn initializeStore() void {
    if (!initialized_store) {
        initialized_store = true;
        js_ast.Expr.Data.Store.create(default_allocator);
        js_ast.Stmt.Data.Store.create(default_allocator);
    }
}
pub const Version = struct {
    /// Direct download URL for this platform's release zip ("" when no
    /// matching asset was found).
    zip_url: string,
    /// Git tag of the release, e.g. "bun-v0.0.83".
    tag: string,
    /// Backing storage for `tag`/`zip_url` (the raw API response body).
    buf: MutableString,
    /// Reported size of the zip asset in bytes (0 when unknown).
    size: u32 = 0,

    /// Returns the version portion of the tag with the "bun-v" prefix
    /// stripped, or null when the tag does not follow that format.
    pub fn name(this: Version) ?string {
        const prefix = "bun-v";
        if (this.tag.len <= prefix.len) return null;
        if (!strings.eqlComptime(this.tag[0..prefix.len], "bun-v")) return null;
        return this.tag[prefix.len..];
    }

    // Compile-time identifiers for the release asset built for this OS/arch.
    pub const platform_label = if (Environment.isMac) "darwin" else "linux";
    pub const arch_label = if (Environment.isAarch64) "aarch64" else "x64";
    pub const triplet = platform_label ++ "-" ++ arch_label;
    pub const folder_name = "bun-" ++ triplet;
    pub const zip_filename = folder_name ++ ".zip";

    // Tag corresponding to the version compiled into this binary.
    const current_version: string = "bun-v" ++ Global.package_json_version;

    /// True when this release's tag matches the running binary's version.
    pub fn isCurrent(this: Version) bool {
        return strings.eqlComptime(this.tag, current_version);
    }
};
pub const UpgradeCheckerThread = struct {
    // Kept at container scope so the thread handle outlives spawn()'s stack frame.
    var update_checker_thread: std.Thread = undefined;

    /// Spawns a detached background thread that checks GitHub for a newer bun
    /// release. Does nothing when BUN_DISABLE_UPGRADE_CHECK or CI is present in
    /// the environment, or when the thread cannot be spawned (best-effort).
    pub fn spawn(env_loader: *DotEnv.Loader) void {
        if (env_loader.map.get("BUN_DISABLE_UPGRADE_CHECK") != null or env_loader.map.get("CI") != null) return;
        update_checker_thread = std.Thread.spawn(.{}, run, .{env_loader}) catch return;
        update_checker_thread.detach();
    }

    /// Worker body: sleeps a random 100–10000ms (presumably to spread out API
    /// requests across processes), then fetches the latest version silently
    /// and prints an upgrade hint to stderr when a newer release exists.
    fn _run(env_loader: *DotEnv.Loader) anyerror!void {
        var rand = std.rand.DefaultPrng.init(@intCast(u64, @maximum(std.time.milliTimestamp(), 0)));
        const delay = rand.random.intRangeAtMost(u64, 100, 10000);
        std.time.sleep(std.time.ns_per_ms * delay);
        Output.Source.configureThread();
        // `refresher`/`progress` may be undefined here: with silent=true,
        // getLatestVersion only touches them inside `if (!silent)` branches.
        const version = (try UpgradeCommand.getLatestVersion(default_allocator, env_loader, undefined, undefined, true)) orelse return;
        if (!version.isCurrent()) {
            if (version.name()) |name| {
                Output.prettyErrorln("\n<r><d>Bun v{s} is out. Run <b><cyan>bun upgrade<r> to upgrade.\n", .{name});
                Output.flush();
            }
        }
    }

    /// Thread entry point; swallows all errors (the check is best-effort),
    /// logging them only in debug builds.
    fn run(env_loader: *DotEnv.Loader) void {
        _run(env_loader) catch |err| {
            if (Environment.isDebug) {
                std.debug.print("\n[UpgradeChecker] ERROR: {s}\n", .{@errorName(err)});
            }
        };
    }
};
pub const UpgradeCommand = struct {
    /// HTTP timeout (milliseconds) applied to both the release-metadata
    /// request and the zip download.
    pub const timeout: u32 = 30000;
    // Packed header buffer: header names and values are stored back-to-back
    // and sliced via Api.StringPointer offsets, so "Accept" and its value are
    // intentionally concatenated with no separator.
    const default_github_headers = "Acceptapplication/vnd.github.v3+json";
    var github_repository_url_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
    var current_executable_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
    var unzip_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
    var tmpdir_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;

    /// Fetches the latest bun release from the GitHub releases API.
    ///
    /// Returns the parsed Version when a matching zip asset exists, null on
    /// soft failures (or, non-silently, terminates the process with a
    /// message), and an error for HTTP failures. When `silent` is true the
    /// `refresher`/`progress` parameters are never touched and may be
    /// `undefined`. Honors GITHUB_API_DOMAIN (proxy) and GITHUB_ACCESS_TOKEN.
    pub fn getLatestVersion(
        allocator: *std.mem.Allocator,
        env_loader: *DotEnv.Loader,
        refresher: *std.Progress,
        progress: *std.Progress.Node,
        comptime silent: bool,
    ) !?Version {
        var headers_buf: string = default_github_headers;
        var header_entries: Headers.Entries = .{};
        const accept = Headers.Kv{
            .name = Api.StringPointer{ .offset = 0, .length = @intCast(u32, "Accept".len) },
            .value = Api.StringPointer{ .offset = @intCast(u32, "Accept".len), .length = @intCast(u32, "application/vnd.github.v3+json".len) },
        };
        try header_entries.append(allocator, accept);

        // Build the complete header set *before* constructing the client:
        // previously the access-token entry was appended after HTTPClient.init
        // had already received `header_entries`/`headers_buf`, so the token
        // header could never reach the request.
        if (env_loader.map.get("GITHUB_ACCESS_TOKEN")) |access_token| {
            if (access_token.len > 0) {
                headers_buf = try std.fmt.allocPrint(allocator, default_github_headers ++ "Access-TokenBearer {s}", .{access_token});
                try header_entries.append(
                    allocator,
                    Headers.Kv{
                        .name = Api.StringPointer{
                            .offset = accept.value.length + accept.value.offset,
                            .length = @intCast(u32, "Access-Token".len),
                        },
                        .value = Api.StringPointer{
                            .offset = @intCast(u32, accept.value.length + accept.value.offset + "Access-Token".len),
                            // The value slice is "Bearer <token>", so its length
                            // must include the "Bearer " prefix; using only
                            // access_token.len truncated the token by 7 bytes.
                            .length = @intCast(u32, "Bearer ".len + access_token.len),
                        },
                    },
                );
            }
        }

        // In case they're using a GitHub proxy in e.g. China
        var github_api_domain: string = "api.github.com";
        if (env_loader.map.get("GITHUB_API_DOMAIN")) |api_domain| {
            if (api_domain.len > 0) {
                github_api_domain = api_domain;
            }
        }

        var api_url = URL.parse(
            try std.fmt.bufPrint(
                &github_repository_url_buf,
                "https://{s}/repos/Jarred-Sumner/bun-releases-for-updater/releases/latest",
                .{
                    github_api_domain,
                },
            ),
        );

        var client = HTTPClient.init(
            allocator,
            .GET,
            api_url,
            header_entries,
            headers_buf,
        );
        client.timeout = timeout;
        if (!silent) {
            client.progress_node = progress;
        }

        var metadata_body = try MutableString.init(allocator, 2048);

        var response = try client.send("", &metadata_body);

        switch (response.status_code) {
            404 => return error.HTTP404,
            403 => return error.HTTPForbidden,
            429 => return error.HTTPTooManyRequests,
            499...599 => return error.GitHubIsDown,
            200 => {},
            else => return error.HTTPError,
        }

        var log = logger.Log.init(allocator);
        var source = logger.Source.initPathString("releases.json", metadata_body.list.items);
        initializeStore();
        var expr = ParseJSON(&source, &log, allocator) catch |err| {
            if (!silent) {
                progress.end();
                refresher.refresh();

                if (log.errors > 0) {
                    if (Output.enable_ansi_colors) {
                        try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
                    } else {
                        try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
                    }
                    Output.flush();
                    std.os.exit(1);
                } else {
                    Output.prettyErrorln("Error parsing releases from GitHub: <r><red>{s}<r>", .{@errorName(err)});
                    Output.flush();
                    std.os.exit(1);
                }
            }

            return null;
        };

        if (log.errors > 0) {
            if (comptime !silent) {
                progress.end();
                refresher.refresh();

                if (Output.enable_ansi_colors) {
                    try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
                } else {
                    try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
                }
                Output.flush();
                std.os.exit(1);
            }

            return null;
        }

        // `buf` keeps the response body alive: `tag` and `zip_url` are slices
        // into it.
        var version = Version{ .zip_url = "", .tag = "", .buf = metadata_body, .size = 0 };

        if (expr.data != .e_object) {
            if (comptime !silent) {
                progress.end();
                refresher.refresh();

                const json_type: js_ast.Expr.Tag = @as(js_ast.Expr.Tag, expr.data);
                Output.prettyErrorln("JSON error - expected an object but received {s}", .{@tagName(json_type)});
                Output.flush();
                std.os.exit(1);
            }

            return null;
        }

        if (expr.asProperty("tag_name")) |tag_name_| {
            if (tag_name_.expr.asString(allocator)) |tag_name| {
                version.tag = tag_name;
            }
        }

        if (version.tag.len == 0) {
            if (comptime !silent) {
                progress.end();
                refresher.refresh();

                Output.prettyErrorln("JSON Error parsing releases from GitHub: <r><red>tag_name<r> is missing?\n{s}", .{metadata_body.list.items});
                Output.flush();
                std.os.exit(1);
            }

            return null;
        }

        // Scan the release assets for the zip built for this OS/arch triplet.
        get_asset: {
            const assets_ = expr.asProperty("assets") orelse break :get_asset;
            var assets = assets_.expr.asArray() orelse break :get_asset;

            while (assets.next()) |asset| {
                // Only "application/zip" assets are candidates.
                if (asset.asProperty("content_type")) |content_type| {
                    const content_type_ = (content_type.expr.asString(allocator)) orelse continue;
                    if (comptime isDebug) {
                        Output.prettyln("Content-type: {s}", .{content_type_});
                        Output.flush();
                    }
                    if (!strings.eqlComptime(content_type_, "application/zip")) continue;
                }

                if (asset.asProperty("name")) |name_| {
                    if (name_.expr.asString(allocator)) |name| {
                        if (comptime isDebug) {
                            Output.prettyln("Comparing {s} vs {s}", .{ name, Version.zip_filename });
                            Output.flush();
                        }

                        if (strings.eqlComptime(name, Version.zip_filename)) {
                            version.zip_url = (asset.asProperty("browser_download_url") orelse break :get_asset).expr.asString(allocator) orelse break :get_asset;
                            if (comptime isDebug) {
                                Output.prettyln("Found Zip {s}", .{version.zip_url});
                                Output.flush();
                            }

                            if (asset.asProperty("size")) |size_| {
                                if (size_.expr.data == .e_number) {
                                    // Clamp negative values to 0 before the cast.
                                    version.size = @intCast(u32, @maximum(@floatToInt(i32, std.math.ceil(size_.expr.data.e_number.value)), 0));
                                }
                            }
                            return version;
                        }
                    }
                }
            }
        }

        // A release exists but has no asset for this platform.
        if (comptime !silent) {
            progress.end();
            refresher.refresh();

            if (version.name()) |name| {
                Output.prettyErrorln("Bun v{s} is out, but not for this platform ({s}) yet.", .{
                    name, Version.triplet,
                });
            }

            Output.flush();
            std.os.exit(0);
        }

        version.buf.deinit();

        return null;
    }

    // Path of the new executable inside the extracted zip.
    const exe_subpath = Version.folder_name ++ std.fs.path.sep_str ++ "bun";

    /// `bun upgrade`: fetches the latest release, downloads the platform zip,
    /// extracts it with the system `unzip` (to preserve xattrs used for
    /// codesigning), runs `<new bun> --version` to verify the download, then
    /// replaces the currently running executable. Setting BUN_DRY_RUN skips
    /// the final move.
    pub fn exec(ctx: Command.Context) !void {
        var filesystem = try fs.FileSystem.init1(ctx.allocator, null);
        var env_loader: DotEnv.Loader = brk: {
            var map = try ctx.allocator.create(DotEnv.Map);
            map.* = DotEnv.Map.init(ctx.allocator);
            break :brk DotEnv.Loader.init(map, ctx.allocator);
        };
        env_loader.loadProcess();

        var version: Version = undefined;

        {
            var refresher = std.Progress{};
            var progress = try refresher.start("Fetching version tags", 0);

            version = (try getLatestVersion(ctx.allocator, &env_loader, &refresher, progress, false)) orelse return;

            progress.end();
            refresher.refresh();

            if (version.name() != null and version.isCurrent()) {
                Output.prettyErrorln(
                    "<r><green>Congrats!<r> You're already on the latest version of Bun <d>(which is v{s})<r>",
                    .{
                        version.name().?,
                    },
                );
                Output.flush();
                std.os.exit(0);
            }

            if (version.name() == null) {
                Output.prettyErrorln(
                    // (typo fix: "expeccted" -> "expected")
                    "<r><red>error:<r> Bun versions are currently unavailable (the latest version name didn't match the expected format)",
                    .{},
                );
                Output.flush();
                std.os.exit(1);
            }
        }

        {
            Output.prettyErrorln("<r><b>Bun <cyan>v{s}<r> is out<r>! You're on <blue>{s}<r>\n", .{ version.name().?, Global.package_json_version });
            Output.flush();

            var refresher = std.Progress{};
            var progress = try refresher.start("Downloading", version.size);
            refresher.refresh();
            var client = HTTPClient.init(
                ctx.allocator,
                .GET,
                URL.parse(version.zip_url),
                .{},
                "",
            );
            client.timeout = timeout;
            client.progress_node = progress;

            var zip_file_buffer = try MutableString.init(ctx.allocator, @maximum(version.size, 1024));
            var response = try client.send(
                "",
                &zip_file_buffer,
            );

            switch (response.status_code) {
                404 => return error.HTTP404,
                403 => return error.HTTPForbidden,
                429 => return error.HTTPTooManyRequests,
                499...599 => return error.GitHubIsDown,
                200 => {},
                else => return error.HTTPError,
            }

            var bytes = zip_file_buffer.toOwnedSliceLeaky();

            progress.end();
            refresher.refresh();

            if (bytes.len == 0) {
                Output.prettyErrorln("<r><red>error:<r> Failed to download the latest version of Bun. Received empty content", .{});
                Output.flush();
                std.os.exit(1);
            }

            const version_name = version.name().?;

            // Work inside a per-version subdirectory of the temp dir.
            var save_dir_ = filesystem.tmpdir();
            var save_dir = save_dir_.makeOpenPath(version_name, .{ .iterate = true }) catch {
                Output.prettyErrorln("<r><red>error:<r> Failed to open temporary directory", .{});
                Output.flush();
                std.os.exit(1);
            };
            var tmpdir_path = std.os.getFdPath(save_dir.fd, &tmpdir_path_buf) catch {
                Output.prettyErrorln("<r><red>error:<r> Failed to read temporary directory", .{});
                Output.flush();
                std.os.exit(1);
            };

            tmpdir_path_buf[tmpdir_path.len] = 0;
            var tmpdir_z = tmpdir_path_buf[0..tmpdir_path.len :0];
            std.os.chdirZ(tmpdir_z) catch {};

            const tmpname = "bun.zip";

            var zip_file = save_dir.createFileZ(tmpname, .{ .truncate = true }) catch |err| {
                Output.prettyErrorln("<r><red>error:<r> Failed to open temp file {s}", .{@errorName(err)});
                Output.flush();
                std.os.exit(1);
            };

            {
                _ = zip_file.writeAll(bytes) catch |err| {
                    save_dir.deleteFileZ(tmpname) catch {};
                    Output.prettyErrorln("<r><red>error:<r> Failed to write to temp file {s}", .{@errorName(err)});
                    Output.flush();
                    std.os.exit(1);
                };
                zip_file.close();
            }

            {
                // Always remove the downloaded zip, success or failure.
                defer {
                    save_dir.deleteFileZ(tmpname) catch {};
                }

                const unzip_exe = which(&unzip_path_buf, env_loader.map.get("PATH") orelse "", filesystem.top_level_dir, "unzip") orelse {
                    save_dir.deleteFileZ(tmpname) catch {};
                    Output.prettyErrorln("<r><red>error:<r> Failed to locate \"unzip\" in PATH. bun upgrade needs \"unzip\" to work.", .{});
                    Output.flush();
                    std.os.exit(1);
                };

                // We could just embed libz2
                // however, we want to be sure that xattrs are preserved
                // xattrs are used for codesigning
                // it'd be easy to mess that up
                var unzip_argv = [_]string{
                    std.mem.span(unzip_exe),
                    "-q",
                    "-o",
                    std.mem.span(tmpname),
                };

                var unzip_process = try std.ChildProcess.init(&unzip_argv, ctx.allocator);
                defer unzip_process.deinit();
                unzip_process.cwd = tmpdir_path;
                unzip_process.stdin_behavior = .Inherit;
                unzip_process.stdout_behavior = .Inherit;
                unzip_process.stderr_behavior = .Inherit;

                const unzip_result = unzip_process.spawnAndWait() catch |err| {
                    save_dir.deleteFileZ(tmpname) catch {};
                    Output.prettyErrorln("<r><red>error:<r> Failed to spawn unzip due to {s}.", .{@errorName(err)});
                    Output.flush();
                    std.os.exit(1);
                };

                if (unzip_result.Exited != 0) {
                    Output.prettyErrorln("<r><red>Unzip failed<r> (exit code: {d})", .{unzip_result.Exited});
                    Output.flush();
                    save_dir.deleteFileZ(tmpname) catch {};
                    std.os.exit(1);
                }
            }

            {
                // Sanity-check: the extracted binary must run and must report
                // exactly the version we expected to download.
                var verify_argv = [_]string{
                    exe_subpath,
                    "--version",
                };

                const result = std.ChildProcess.exec(.{
                    .allocator = ctx.allocator,
                    .argv = &verify_argv,
                    .cwd = tmpdir_path,
                    .max_output_bytes = 128,
                }) catch |err| {
                    save_dir_.deleteTree(version_name) catch {};
                    Output.prettyErrorln("<r><red>error<r> Failed to verify Bun {s}<r>)", .{@errorName(err)});
                    Output.flush();
                    std.os.exit(1);
                };

                if (result.term.Exited != 0) {
                    save_dir_.deleteTree(version_name) catch {};
                    Output.prettyErrorln("<r><red>error<r> failed to verify Bun<r> (exit code: {d})", .{result.term.Exited});
                    Output.flush();
                    std.os.exit(1);
                }

                if (!strings.eql(std.mem.trim(u8, result.stdout, " \n\r\t"), version_name)) {
                    save_dir_.deleteTree(version_name) catch {};
                    Output.prettyErrorln(
                        "<r><red>error<r>: The downloaded version of Bun (<red>{s}<r>) doesn't match the expected version (<b>{s}<r>)<r>. Cancelled upgrade",
                        .{
                            result.stdout[0..@minimum(result.stdout.len, 128)],
                            version_name,
                        },
                    );
                    Output.flush();
                    std.os.exit(1);
                }
            }

            // Split the running executable's path into a NUL-terminated
            // dirname and basename within current_executable_buf.
            var destination_executable_ = std.fs.selfExePath(&current_executable_buf) catch return error.UpgradeFailedMissingExecutable;
            current_executable_buf[destination_executable_.len] = 0;

            var target_filename_ = std.fs.path.basename(destination_executable_);
            var target_filename = current_executable_buf[destination_executable_.len - target_filename_.len ..][0..target_filename_.len :0];
            var target_dir_ = std.fs.path.dirname(destination_executable_) orelse return error.UpgradeFailedBecauseOfMissingExecutableDir;
            // safe because the slash will no longer be in use
            current_executable_buf[target_dir_.len] = 0;
            var target_dirname = current_executable_buf[0..target_dir_.len :0];
            var target_dir = std.fs.openDirAbsoluteZ(target_dirname, .{ .iterate = true }) catch |err| {
                save_dir_.deleteTree(version_name) catch {};
                Output.prettyErrorln("<r><red>error:<r> Failed to open Bun's install directory {s}", .{@errorName(err)});
                Output.flush();
                std.os.exit(1);
            };

            if (env_loader.map.get("BUN_DRY_RUN") == null) {
                C.moveFileZ(save_dir.fd, exe_subpath, target_dir.fd, target_filename) catch |err| {
                    save_dir_.deleteTree(version_name) catch {};
                    Output.prettyErrorln("<r><red>error:<r> Failed to move new version of Bun due to {s}. You could try the install script instead:\n curl -L https://bun.sh/install | bash", .{@errorName(err)});
                    Output.flush();
                    std.os.exit(1);
                };
            }

            Output.printStartEnd(ctx.start_time, std.time.nanoTimestamp());
            Output.prettyErrorln("<r> Upgraded.\n\n<b><green>Welcome to Bun v{s}!<r>\n\n Report any bugs:\n https://github.com/Jarred-Sumner/bun/issues\n\n What's new:\n https://github.com/Jarred-Sumner/bun/releases/tag/{s}<r>", .{ version_name, version.tag });
            Output.flush();
            return;
        }
    }
};

View File

@@ -63,7 +63,7 @@ pub fn Param(comptime Id: type) type {
/// Takes a string and parses it to a Param(Help).
/// This is the reverse of 'help' but for at single parameter only.
pub fn parseParam(line: []const u8) !Param(Help) {
@setEvalBranchQuota(9999);
@setEvalBranchQuota(999999);
var found_comma = false;
var it = mem.tokenize(u8, line, " \t");

View File

@@ -21,6 +21,6 @@ pub const isDebug = std.builtin.Mode.Debug == std.builtin.mode;
pub const isRelease = std.builtin.Mode.Debug != std.builtin.mode and !isTest;
pub const isTest = std.builtin.is_test;
pub const isLinux = std.Target.current.os.tag == .linux;
pub const isAarch64 = std.Target.current.cpu.arch == .aarch64;
pub const isAarch64 = std.Target.current.cpu.arch.isAARCH64();
pub const analytics_url = if (isDebug) "http://localhost:4000/events" else "http://i.bun.sh/events";

View File

@@ -33,6 +33,8 @@ pub const Lexer = struct {
has_newline_before: bool = true,
was_quoted: bool = false,
is_process_env: bool = false,
pub inline fn codepoint(this: *const Lexer) CodePoint {
return this.cursor.c;
}
@@ -189,9 +191,11 @@ pub const Lexer = struct {
)];
},
' ' => {
any_spaces = true;
while (lexer.codepoint() == ' ') lexer.step();
continue;
if (!lexer.is_process_env) {
any_spaces = true;
while (lexer.codepoint() == ' ') lexer.step();
continue;
}
},
else => {},
}
@@ -364,6 +368,8 @@ pub const Loader = struct {
@".env.production": ?logger.Source = null,
@".env": ?logger.Source = null,
quiet: bool = false,
did_load_process: bool = false,
const empty_string_value: string = "\"\"";
@@ -439,7 +445,7 @@ pub const Loader = struct {
const EString = js_ast.E.String;
var e_strings = try allocator.alloc(js_ast.E.String, e_strings_to_allocate);
var e_strings = try allocator.alloc(js_ast.E.String, e_strings_to_allocate * 2);
errdefer allocator.free(e_strings);
errdefer allocator.free(key_buf);
var key_fixed_allocator = std.heap.FixedBufferAllocator.init(key_buf);
@@ -556,7 +562,7 @@ pub const Loader = struct {
var source = logger.Source.initPathString("process.env", "");
for (std.os.environ) |env| {
source.contents = std.mem.span(env);
Parser.parse(&source, this.allocator, this.map, true);
Parser.parse(&source, this.allocator, this.map, true, true);
}
this.did_load_process = true;
@@ -570,7 +576,7 @@ pub const Loader = struct {
// mostly for tests
pub fn loadFromString(this: *Loader, str: string, comptime overwrite: bool) void {
var source = logger.Source.initPathString("test", str);
Parser.parse(&source, this.allocator, this.map, overwrite);
Parser.parse(&source, this.allocator, this.map, overwrite, true);
std.mem.doNotOptimizeAway(&source);
}
@@ -610,7 +616,7 @@ pub const Loader = struct {
Analytics.Features.dotenv = true;
}
this.printLoaded(start);
if (!this.quiet) this.printLoaded(start);
}
pub fn printLoaded(this: *Loader, start: i128) void {
@@ -697,6 +703,7 @@ pub const Loader = struct {
this.allocator,
this.map,
override,
false,
);
@field(this, base) = source;
@@ -709,8 +716,10 @@ pub const Parser = struct {
allocator: *std.mem.Allocator,
map: *Map,
comptime override: bool,
is_process: bool,
) void {
var lexer = Lexer.init(source);
lexer.is_process_env = is_process;
var fbs = std.io.fixedBufferStream(&temporary_nested_value_buffer);
var writer = fbs.writer();
var temp_variable_i: u16 = 0;
@@ -748,6 +757,23 @@ pub const Map = struct {
map: HashTable,
pub fn cloneToBufMap(this: *Map, allocator: *std.mem.Allocator) !std.BufMap {
var buf_map = std.BufMap.init(allocator);
const Convert = struct {
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
};
var iter_ = this.map.iterator();
while (iter_.next()) |entry| {
try buf_map.putMove(Convert.constStrToU8(entry.key_ptr.*), Convert.constStrToU8(entry.value_ptr.*));
}
return buf_map;
}
pub inline fn init(allocator: *std.mem.Allocator) Map {
return Map{ .map = HashTable.init(allocator) };
}
@@ -764,8 +790,9 @@ pub const Map = struct {
var iterator = self.map.iterator();
_ = try writer.writeAll("{");
while (iterator.next()) |entry| {
_ = try writer.writeAll("\n ");
std.json.stringify(entry.key_ptr.*, options, writer) catch unreachable;
_ = try writer.writeAll(": ");
@@ -777,7 +804,7 @@ pub const Map = struct {
}
}
try writer.writeAll("}");
try writer.writeAll("\n}");
}
pub inline fn get(
@@ -852,6 +879,7 @@ test "DotEnv Loader - basic" {
default_allocator,
&map,
true,
false,
);
try expectString(map.get("NESTED_VALUES_RESPECT_ESCAPING").?, "\\$API_KEY");
@@ -935,8 +963,8 @@ test "DotEnv Loader - copyForDefine" {
);
try expect(env_defines.get("process.env.BACON") != null);
try expectString(env_defines.get("process.env.BACON").?.value.e_string.slice8(), "false");
try expectString(env_defines.get("process.env.HOSTNAME").?.value.e_string.slice8(), "example.com");
try expectString(env_defines.get("process.env.BACON").?.value.e_string.utf8, "false");
try expectString(env_defines.get("process.env.HOSTNAME").?.value.e_string.utf8, "example.com");
try expect(env_defines.get("process.env.THIS_SHOULDNT_BE_IN_DEFINES_MAP") != null);
user_defines = UserDefine.init(default_allocator);
@@ -944,6 +972,6 @@ test "DotEnv Loader - copyForDefine" {
buf = try loader.copyForDefine(UserDefine, &user_defines, UserDefinesArray, &env_defines, framework, .prefix, "HO", default_allocator);
try expectString(env_defines.get("process.env.HOSTNAME").?.value.e_string.slice8(), "example.com");
try expectString(env_defines.get("process.env.HOSTNAME").?.value.e_string.utf8, "example.com");
try expect(env_defines.get("process.env.THIS_SHOULDNT_BE_IN_DEFINES_MAP") == null);
}

View File

@@ -1 +1 @@
7aa588534c09f455
2bbe5942da63d2ba

View File

@@ -493,7 +493,7 @@ pub const FileSystem = struct {
pub var tmpdir_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const PLATFORM_TMP_DIR: string = switch (std.Target.current.os.tag) {
.windows => "%TMPDIR%",
.windows => "TMPDIR",
.macos => "/private/tmp",
else => "/tmp",
};

View File

@@ -1205,6 +1205,9 @@ pub const RequestContext = struct {
var module_map = ZigGlobalObject.getModuleRegistryMap(vm.global);
if (!VM.isJITEnabled()) {
Output.prettyErrorln("<red><r>warn:<r> JIT is disabled,,,this is a bug in Bun and/or a permissions problem. JS will run slower.", .{});
if (vm.bundler.env.map.get("BUN_CRASH_WITHOUT_JIT") != null) {
Global.crash();
}
}
while (true) {
@@ -2631,6 +2634,10 @@ pub const Server = struct {
Analytics.Features.filesystem_router = server.bundler.router != null;
Analytics.Features.bunjs = server.transform_options.node_modules_bundle_path_server != null;
const UpgradeCheckerThread = @import("./cli/upgrade_command.zig").UpgradeCheckerThread;
UpgradeCheckerThread.spawn(server.bundler.env);
var did_init = false;
while (!did_init) {
defer Output.flush();

View File

@@ -44,7 +44,7 @@ body_size: u32 = 0,
read_count: u32 = 0,
remaining_redirect_count: i8 = 127,
redirect_buf: [2048]u8 = undefined,
disable_shutdown: bool = false,
disable_shutdown: bool = true,
timeout: u32 = 0,
progress_node: ?*std.Progress.Node = null,

View File

@@ -391,7 +391,7 @@ pub const Platform = enum {
pub const Extensions = struct {
pub const In = struct {
pub const JavaScript = [_]string{ ".js", ".ts", ".tsx", ".jsx", ".json" };
pub const JavaScript = [_]string{ ".js", ".cjs", ".mts", ".cts", ".ts", ".tsx", ".jsx", ".json" };
};
pub const Out = struct {
pub const JavaScript = [_]string{
@@ -601,10 +601,16 @@ pub const defaultLoaders = std.ComptimeStringMap(Loader, .{
.{ ".jsx", Loader.jsx },
.{ ".json", Loader.json },
.{ ".js", Loader.jsx },
.{ ".mjs", Loader.js },
.{ ".cjs", Loader.js },
.{ ".css", Loader.css },
.{ ".ts", Loader.ts },
.{ ".tsx", Loader.tsx },
.{ ".mts", Loader.ts },
.{ ".cts", Loader.ts },
});
// https://webpack.js.org/guides/package-exports/#reference-syntax
@@ -891,7 +897,15 @@ pub fn loadersFromTransformOptions(allocator: *std.mem.Allocator, _loaders: ?Api
input_loaders.extensions,
loader_values,
);
const default_loader_ext = comptime [_]string{ ".jsx", ".json", ".js", ".mjs", ".css", ".ts", ".tsx" };
const default_loader_ext = comptime [_]string{
".jsx", ".json",
".js", ".mjs",
".cjs", ".css",
// https://devblogs.microsoft.com/typescript/announcing-typescript-4-5-beta/#new-file-extensions
".ts", ".tsx",
".mts", ".cts",
};
inline for (default_loader_ext) |ext| {
if (!loaders.contains(ext)) {

View File

@@ -18,6 +18,8 @@ threadlocal var hashy: [2048]u8 = undefined;
pub const MacroImportReplacementMap = std.StringArrayHashMap(string);
pub const MacroMap = std.StringArrayHashMapUnmanaged(MacroImportReplacementMap);
const ScriptsMap = std.StringArrayHashMap(string);
pub const PackageJSON = struct {
pub const LoadFramework = enum {
none,
@@ -57,6 +59,8 @@ pub const PackageJSON = struct {
version: string = "",
hash: u32 = 0xDEADBEEF,
scripts: ?*ScriptsMap = null,
always_bundle: []string = &.{},
macros: MacroMap = MacroMap{},
@@ -440,6 +444,7 @@ pub const PackageJSON = struct {
input_path: string,
dirname_fd: StoredFileDescriptorType,
comptime generate_hash: bool,
comptime include_scripts: bool,
) ?PackageJSON {
// TODO: remove this extra copy
@@ -690,6 +695,40 @@ pub const PackageJSON = struct {
}
}
if (include_scripts) {
read_scripts: {
if (json.asProperty("scripts")) |scripts_prop| {
if (scripts_prop.expr.data == .e_object) {
const scripts_obj = scripts_prop.expr.data.e_object;
var count: usize = 0;
for (scripts_obj.properties) |prop| {
const key = prop.key.?.asString(r.allocator) orelse continue;
const value = prop.value.?.asString(r.allocator) orelse continue;
count += @as(usize, @boolToInt(key.len > 0 and value.len > 0));
}
if (count == 0) break :read_scripts;
var scripts = ScriptsMap.init(r.allocator);
scripts.ensureUnusedCapacity(count) catch break :read_scripts;
for (scripts_obj.properties) |prop| {
const key = prop.key.?.asString(r.allocator) orelse continue;
const value = prop.value.?.asString(r.allocator) orelse continue;
if (!(key.len > 0 and value.len > 0)) continue;
scripts.putAssumeCapacity(key, value);
}
package_json.scripts = r.allocator.create(ScriptsMap) catch unreachable;
package_json.scripts.?.* = scripts;
}
}
}
}
// TODO: side effects
// TODO: exports map

View File

@@ -207,6 +207,7 @@ threadlocal var _open_dirs: [256]std.fs.Dir = undefined;
threadlocal var resolve_without_remapping_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var index_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var dir_info_uncached_filename_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var node_bin_path: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var dir_info_uncached_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var tsconfig_base_url_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var relative_abs_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
@@ -324,6 +325,12 @@ pub const LoadResult = struct {
// This is a global so even if multiple resolvers are created, the mutex will still work
var resolver_Mutex: Mutex = undefined;
var resolver_Mutex_loaded: bool = false;
const BinFolderArray = std.BoundedArray(string, 128);
var bin_folders: BinFolderArray = undefined;
var bin_folders_lock: Mutex = Mutex.init();
var bin_folders_loaded: bool = false;
// TODO:
// - Fix "browser" field mapping
// - Consider removing the string list abstraction?
@@ -336,6 +343,9 @@ pub const Resolver = struct {
node_module_bundle: ?*NodeModuleBundle,
extension_order: []const string = undefined,
care_about_bin_folder: bool = false,
care_about_scripts: bool = false,
debug_logs: ?DebugLogs = null,
elapsed: i128 = 0, // tracing
@@ -1361,8 +1371,19 @@ pub const Resolver = struct {
return path.text;
}
pub fn binDirs(r: *const ThisResolver) []const string {
if (!bin_folders_loaded) return &[_]string{};
return bin_folders.constSlice();
}
pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
const pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true) orelse return null;
var pkg: PackageJSON = undefined;
if (!r.care_about_scripts) {
pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, false) orelse return null;
} else {
pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, true) orelse return null;
}
var _pkg = try r.allocator.create(PackageJSON);
_pkg.* = pkg;
return _pkg;
@@ -2477,6 +2498,60 @@ pub const Resolver = struct {
info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
}
}
if (r.care_about_bin_folder) {
append_bin_dir: {
if (info.has_node_modules) {
if (entries.getComptimeQuery("node_modules")) |q| {
if (!bin_folders_loaded) {
bin_folders_loaded = true;
bin_folders = BinFolderArray.init(0) catch unreachable;
}
const this_dir = std.fs.Dir{ .fd = fd };
var file = this_dir.openDirZ("node_modules/.bin", .{}) catch break :append_bin_dir;
defer file.close();
var bin_path = std.os.getFdPath(file.fd, &node_bin_path) catch break :append_bin_dir;
bin_folders_lock.lock();
defer bin_folders_lock.unlock();
for (bin_folders.constSlice()) |existing_folder| {
if (strings.eql(existing_folder, bin_path)) {
break :append_bin_dir;
}
}
bin_folders.append(r.fs.dirname_store.append([]u8, bin_path) catch break :append_bin_dir) catch {};
}
}
if (info.is_node_modules) {
if (entries.getComptimeQuery(".bin")) |q| {
if (q.entry.kind(rfs) == .dir) {
if (!bin_folders_loaded) {
bin_folders_loaded = true;
bin_folders = BinFolderArray.init(0) catch unreachable;
}
const this_dir = std.fs.Dir{ .fd = fd };
var file = this_dir.openDirZ(".bin", .{}) catch break :append_bin_dir;
defer file.close();
var bin_path = std.os.getFdPath(file.fd, &node_bin_path) catch break :append_bin_dir;
bin_folders_lock.lock();
defer bin_folders_lock.unlock();
for (bin_folders.constSlice()) |existing_folder| {
if (strings.eql(existing_folder, bin_path)) {
break :append_bin_dir;
}
}
bin_folders.append(r.fs.dirname_store.append([]u8, bin_path) catch break :append_bin_dir) catch {};
}
}
}
}
}
// }
if (parent != null) {

View File

@@ -1 +1 @@
d0564ad3b9e88744
a85b24fd6904248e

View File

@@ -659,9 +659,9 @@ pub const Connection = struct {
pub fn close(this: *Connection) !void {
if (!this.disable_shutdown) {
_ = s2n_shutdown(this.conn, &blocked_status);
Pool.put(this.node);
}
std.os.closeSocket(this.fd);
Pool.put(this.node);
}
pub const Writer = std.io.Writer(*Connection, WriteError, write);

View File

@@ -175,6 +175,31 @@ pub fn copyLowercase(in: string, out: []u8) string {
return out[0..in.len];
}
test "eqlComptimeCheckLen" {
try std.testing.expectEqual(eqlComptime("bun-darwin-aarch64.zip", "bun-darwin-aarch64.zip"), true);
const sizes = [_]u8{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 23, 22, 24 };
inline for (sizes) |size| {
var buf: [size]u8 = undefined;
std.mem.set(u8, &buf, 'a');
var buf_copy: [size]u8 = undefined;
std.mem.set(u8, &buf_copy, 'a');
var bad: [size]u8 = undefined;
std.mem.set(u8, &bad, 'b');
try std.testing.expectEqual(std.mem.eql(u8, &buf, &buf_copy), eqlComptime(&buf, comptime brk: {
var buf_copy_: [size]u8 = undefined;
std.mem.set(u8, &buf_copy_, 'a');
break :brk buf_copy_;
}));
try std.testing.expectEqual(std.mem.eql(u8, &buf, &bad), eqlComptime(&bad, comptime brk: {
var buf_copy_: [size]u8 = undefined;
std.mem.set(u8, &buf_copy_, 'a');
break :brk buf_copy_;
}));
}
}
test "copyLowercase" {
{
var in = "Hello, World!";
@@ -324,137 +349,55 @@ pub fn eqlComptimeIgnoreLen(self: string, comptime alt: anytype) bool {
return eqlComptimeCheckLen(self, alt, false);
}
inline fn eqlComptimeCheckLen(self: string, comptime alt: anytype, comptime check_len: bool) bool {
switch (comptime alt.len) {
0 => {
@compileError("Invalid size passed to eqlComptime");
},
2 => {
const check = comptime std.mem.readIntNative(u16, alt[0..alt.len]);
return ((comptime !check_len) or self.len == alt.len) and std.mem.readIntNative(u16, self[0..2]) == check;
},
1, 3 => {
if ((comptime check_len) and alt.len != self.len) {
return false;
}
inline fn eqlComptimeCheckLen(a: string, comptime b: anytype, comptime check_len: bool) bool {
if (comptime check_len) {
if (comptime b.len == 0) {
return a.len == 0;
}
inline for (alt) |c, i| {
if (self[i] != c) return false;
}
return true;
},
4 => {
const check = comptime std.mem.readIntNative(u32, alt[0..alt.len]);
return ((comptime !check_len) or self.len == alt.len) and std.mem.readIntNative(u32, self[0..4]) == check;
},
6 => {
const first = std.mem.readIntNative(u32, alt[0..4]);
const second = std.mem.readIntNative(u16, alt[4..6]);
return self.len == alt.len and first == std.mem.readIntNative(u32, self[0..4]) and
second == std.mem.readIntNative(u16, self[4..6]);
},
5, 7 => {
const check = comptime std.mem.readIntNative(u32, alt[0..4]);
if (((comptime check_len) and
self.len != alt.len) or
std.mem.readIntNative(u32, self[0..4]) != check)
{
return false;
}
const remainder = self[4..];
inline for (alt[4..]) |c, i| {
if (remainder[i] != c) return false;
}
return true;
},
8 => {
const check = comptime std.mem.readIntNative(u64, alt[0..alt.len]);
return ((comptime !check_len) or self.len == alt.len) and std.mem.readIntNative(u64, self[0..8]) == check;
},
9...11 => {
const first = std.mem.readIntNative(u64, alt[0..8]);
if (((comptime check_len) and self.len != alt.len) or first != std.mem.readIntNative(u64, self[0..8])) {
return false;
}
inline for (alt[8..]) |c, i| {
if (self[i + 8] != c) return false;
}
return true;
},
12 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u32, alt[8..12]);
return ((comptime !check_len) or self.len == alt.len) and first == std.mem.readIntNative(u64, self[0..8]) and second == std.mem.readIntNative(u32, self[8..12]);
},
13...15 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u32, alt[8..12]);
if (((comptime !check_len) or self.len != alt.len) or first != std.mem.readIntNative(u64, self[0..8]) or second != std.mem.readIntNative(u32, self[8..12])) {
return false;
}
inline for (alt[13..]) |c, i| {
if (self[i + 13] != c) return false;
}
return true;
},
16 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u64, alt[8..16]);
return ((comptime !check_len) or self.len == alt.len) and first == std.mem.readIntNative(u64, self[0..8]) and second == std.mem.readIntNative(u64, self[8..16]);
},
17 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u64, alt[8..16]);
return ((comptime !check_len) or self.len == alt.len) and
first == std.mem.readIntNative(u64, self[0..8]) and second ==
std.mem.readIntNative(u64, self[8..16]) and
alt[16] == self[16];
},
18 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u64, alt[8..16]);
const third = comptime std.mem.readIntNative(u16, alt[16..18]);
return ((comptime !check_len) or self.len == alt.len) and
first == std.mem.readIntNative(u64, self[0..8]) and second ==
std.mem.readIntNative(u64, self[8..16]) and
std.mem.readIntNative(u16, self[16..18]) == third;
},
23 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u64, alt[8..16]);
return ((comptime !check_len) or self.len == alt.len) and
first == std.mem.readIntNative(u64, self[0..8]) and
second == std.mem.readIntNative(u64, self[8..16]) and
eqlComptimeIgnoreLen(self[16..23], comptime alt[16..23]);
},
22 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u64, alt[8..16]);
return ((comptime !check_len) or self.len == alt.len) and
first == std.mem.readIntNative(u64, self[0..8]) and
second == std.mem.readIntNative(u64, self[8..16]) and
eqlComptimeIgnoreLen(self[16..22], comptime alt[16..22]);
},
24 => {
const first = comptime std.mem.readIntNative(u64, alt[0..8]);
const second = comptime std.mem.readIntNative(u64, alt[8..16]);
const third = comptime std.mem.readIntNative(u64, alt[16..24]);
return ((comptime !check_len) or self.len == alt.len) and
first == std.mem.readIntNative(u64, self[0..8]) and
second == std.mem.readIntNative(u64, self[8..16]) and
third == std.mem.readIntNative(u64, self[16..24]);
},
else => {
@compileError(alt ++ " is too long.");
},
switch (a.len) {
b.len => {},
else => return false,
}
}
const len = comptime b.len;
comptime var dword_length = b.len >> 3;
comptime var b_ptr: usize = 0;
inline while (dword_length > 0) : (dword_length -= 1) {
const slice = comptime if (@typeInfo(@TypeOf(b)) != .Pointer) b else std.mem.span(b);
if (@bitCast(usize, a[b_ptr..][0..@sizeOf(usize)].*) != comptime @bitCast(usize, (slice[b_ptr..])[0..@sizeOf(usize)].*))
return false;
comptime b_ptr += @sizeOf(usize);
if (comptime b_ptr == b.len) return true;
}
if (comptime @sizeOf(usize) == 8) {
if (comptime (len & 4) != 0) {
const slice = comptime if (@typeInfo(@TypeOf(b)) != .Pointer) b else std.mem.span(b);
if (@bitCast(u32, a[b_ptr..][0..@sizeOf(u32)].*) != comptime @bitCast(u32, (slice[b_ptr..])[0..@sizeOf(u32)].*))
return false;
comptime b_ptr += @sizeOf(u32);
if (comptime b_ptr == b.len) return true;
}
}
if (comptime (len & 2) != 0) {
const slice = comptime if (@typeInfo(@TypeOf(b)) != .Pointer) b else std.mem.span(b);
if (@bitCast(u16, a[b_ptr..][0..@sizeOf(u16)].*) != comptime @bitCast(u16, slice[b_ptr .. b_ptr + @sizeOf(u16)].*))
return false;
comptime b_ptr += @sizeOf(u16);
if (comptime b_ptr == b.len) return true;
}
if ((comptime ( len & 1) != 0) and a[b_ptr] != comptime b[b_ptr]) return false;
return true;
}
pub inline fn append(allocator: *std.mem.Allocator, self: string, other: string) !string {